Lines Matching defs:ioat_dma

The fragments below are the lines of the Linux ioat DMA engine driver's init code (drivers/dma/ioat/init.c) that reference the ioat_dma device handle; the number leading each entry is that line's position in the file.

119 ioat_init_channel(struct ioatdma_device *ioat_dma,
121 static void ioat_intr_quirk(struct ioatdma_device *ioat_dma);
122 static void ioat_enumerate_channels(struct ioatdma_device *ioat_dma);
123 static int ioat3_dma_self_test(struct ioatdma_device *ioat_dma);
297 * @ioat_dma: dma device to be tested
299 static int ioat_dma_self_test(struct ioatdma_device *ioat_dma)
304 struct dma_device *dma = &ioat_dma->dma_dev;
305 struct device *dev = &ioat_dma->pdev->dev;
350 tx = ioat_dma->dma_dev.device_prep_dma_memcpy(dma_chan, dma_dest,
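
Lines 297-350 are from ioat_dma_self_test(), which pushes a single device_prep_dma_memcpy() transfer through one channel and verifies the copy. The following is a minimal sketch of that pattern, assuming the usual linux/dmaengine.h context; the function name, buffer sizes, and the 0xaa fill are illustrative, while device_prep_dma_memcpy() and the dev/dma locals come from the fragments above.

static int memcpy_self_test_sketch(struct dma_chan *dma_chan,
				   struct dma_device *dma)
{
	struct device *dev = dma->dev;
	struct dma_async_tx_descriptor *tx;
	dma_addr_t dma_src, dma_dest;
	enum dma_status status;
	dma_cookie_t cookie;
	u8 *src, *dest;
	int err = -EIO;

	src = kzalloc(PAGE_SIZE, GFP_KERNEL);
	dest = kzalloc(PAGE_SIZE, GFP_KERNEL);
	if (!src || !dest) {
		err = -ENOMEM;
		goto free;
	}
	memset(src, 0xaa, PAGE_SIZE);

	/* mapping-error checks elided for brevity */
	dma_src = dma_map_single(dev, src, PAGE_SIZE, DMA_TO_DEVICE);
	dma_dest = dma_map_single(dev, dest, PAGE_SIZE, DMA_FROM_DEVICE);

	tx = dma->device_prep_dma_memcpy(dma_chan, dma_dest, dma_src,
					 PAGE_SIZE, DMA_PREP_INTERRUPT);
	if (!tx)
		goto unmap;

	cookie = dmaengine_submit(tx);
	dma_async_issue_pending(dma_chan);
	status = dma_sync_wait(dma_chan, cookie);

	if (status == DMA_COMPLETE) {
		dma_sync_single_for_cpu(dev, dma_dest, PAGE_SIZE,
					DMA_FROM_DEVICE);
		if (!memcmp(src, dest, PAGE_SIZE))
			err = 0;
	}
unmap:
	dma_unmap_single(dev, dma_src, PAGE_SIZE, DMA_TO_DEVICE);
	dma_unmap_single(dev, dma_dest, PAGE_SIZE, DMA_FROM_DEVICE);
free:
	kfree(src);
	kfree(dest);
	return err;
}
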
400 * @ioat_dma: ioat dma device
402 int ioat_dma_setup_interrupts(struct ioatdma_device *ioat_dma)
405 struct pci_dev *pdev = ioat_dma->pdev;
423 msixcnt = ioat_dma->chancnt;
425 ioat_dma->msix_entries[i].entry = i;
427 err = pci_enable_msix_exact(pdev, ioat_dma->msix_entries, msixcnt);
432 msix = &ioat_dma->msix_entries[i];
433 ioat_chan = ioat_chan_by_index(ioat_dma, i);
439 msix = &ioat_dma->msix_entries[j];
440 ioat_chan = ioat_chan_by_index(ioat_dma, j);
447 ioat_dma->irq_mode = IOAT_MSIX;
456 "ioat-msi", ioat_dma);
461 ioat_dma->irq_mode = IOAT_MSI;
466 IRQF_SHARED, "ioat-intx", ioat_dma);
470 ioat_dma->irq_mode = IOAT_INTX;
473 ioat_intr_quirk(ioat_dma);
475 writeb(intrctrl, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET);
480 writeb(0, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET);
481 ioat_dma->irq_mode = IOAT_NOIRQ;
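
Lines 402-481 are ioat_dma_setup_interrupts(). The fragments show a fallback ladder: try MSI-X with one vector per channel (pci_enable_msix_exact), fall back to a single MSI ("ioat-msi"), then to shared legacy INTx ("ioat-intx"), and finally record IOAT_NOIRQ and mask the interrupt-control register if nothing works. A condensed sketch follows; ioat_irq_handler is a placeholder name (only the "ioat-msi"/"ioat-intx" strings come from the listing), and the per-vector MSI-X request loop is elided.

static irqreturn_t ioat_irq_handler(int irq, void *data); /* placeholder */

static int irq_setup_sketch(struct ioatdma_device *ioat_dma)
{
	struct pci_dev *pdev = ioat_dma->pdev;
	struct device *dev = &pdev->dev;
	int i, err, msixcnt = ioat_dma->chancnt;

	/* 1) MSI-X: one vector per channel */
	for (i = 0; i < msixcnt; i++)
		ioat_dma->msix_entries[i].entry = i;

	err = pci_enable_msix_exact(pdev, ioat_dma->msix_entries, msixcnt);
	if (!err) {
		/*
		 * Request one IRQ per vector here, using
		 * ioat_chan_by_index(ioat_dma, i) as dev_id and
		 * unwinding vectors j < i on failure (elided).
		 */
		ioat_dma->irq_mode = IOAT_MSIX;
		goto done;
	}

	/* 2) single MSI */
	if (!pci_enable_msi(pdev) &&
	    !devm_request_irq(dev, pdev->irq, ioat_irq_handler, 0,
			      "ioat-msi", ioat_dma)) {
		ioat_dma->irq_mode = IOAT_MSI;
		goto done;
	}

	/* 3) shared legacy INTx */
	err = devm_request_irq(dev, pdev->irq, ioat_irq_handler,
			       IRQF_SHARED, "ioat-intx", ioat_dma);
	if (err) {
		/* 4) nothing worked: mask interrupts, run polled */
		writeb(0, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET);
		ioat_dma->irq_mode = IOAT_NOIRQ;
		return err;
	}
	ioat_dma->irq_mode = IOAT_INTX;
done:
	ioat_intr_quirk(ioat_dma);	/* per-device errata, line 1061 */
	/* MSI-X additionally sets a vector-control bit (elided) */
	writeb(IOAT_INTRCTRL_MASTER_INT_EN,
	       ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET);
	return 0;
}
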
486 static void ioat_disable_interrupts(struct ioatdma_device *ioat_dma)
489 writeb(0, ioat_dma->reg_base + IOAT_INTRCTRL_OFFSET);
492 static int ioat_probe(struct ioatdma_device *ioat_dma)
495 struct dma_device *dma = &ioat_dma->dma_dev;
496 struct pci_dev *pdev = ioat_dma->pdev;
499 ioat_dma->completion_pool = dma_pool_create("completion_pool", dev,
504 if (!ioat_dma->completion_pool) {
509 ioat_enumerate_channels(ioat_dma);
514 if (!ioat_dma->chancnt) {
519 err = ioat_dma_setup_interrupts(ioat_dma);
523 err = ioat3_dma_self_test(ioat_dma);
530 ioat_disable_interrupts(ioat_dma);
532 dma_pool_destroy(ioat_dma->completion_pool);
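
Lines 492-532 are ioat_probe(). The visible fragments give both the setup order and its unwind: create the device-wide completion_pool, enumerate channels, wire up interrupts, then run the self-test, with each failure releasing only what was already set up. A sketch of that ordering follows; the pool geometry (sizeof(u64) blocks at cacheline alignment) is an assumption based on the "completion_pool" fragment.

static int probe_sketch(struct ioatdma_device *ioat_dma)
{
	struct device *dev = &ioat_dma->pdev->dev;
	int err;

	ioat_dma->completion_pool = dma_pool_create("completion_pool", dev,
						    sizeof(u64),
						    SMP_CACHE_BYTES,
						    SMP_CACHE_BYTES);
	if (!ioat_dma->completion_pool)
		return -ENOMEM;

	ioat_enumerate_channels(ioat_dma);
	if (!ioat_dma->chancnt) {
		err = -ENODEV;
		goto err_pool;
	}

	err = ioat_dma_setup_interrupts(ioat_dma);
	if (err)
		goto err_pool;

	err = ioat3_dma_self_test(ioat_dma);
	if (err)
		goto err_irq;

	return 0;

err_irq:
	ioat_disable_interrupts(ioat_dma);
err_pool:
	dma_pool_destroy(ioat_dma->completion_pool);
	return err;
}
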
537 static int ioat_register(struct ioatdma_device *ioat_dma)
539 int err = dma_async_device_register(&ioat_dma->dma_dev);
542 ioat_disable_interrupts(ioat_dma);
543 dma_pool_destroy(ioat_dma->completion_pool);
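
Lines 537-543 are nearly all of ioat_register(): if dma_async_device_register() fails, it tears down what ioat_probe() built. Reconstructed from the fragments (the exact control flow is assumed):

static int register_sketch(struct ioatdma_device *ioat_dma)
{
	int err = dma_async_device_register(&ioat_dma->dma_dev);

	if (err) {
		ioat_disable_interrupts(ioat_dma);
		dma_pool_destroy(ioat_dma->completion_pool);
	}

	return err;
}
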
549 static void ioat_dma_remove(struct ioatdma_device *ioat_dma)
551 struct dma_device *dma = &ioat_dma->dma_dev;
553 ioat_disable_interrupts(ioat_dma);
555 ioat_kobject_del(ioat_dma);
562 * @ioat_dma: the ioat dma device to be enumerated
564 static void ioat_enumerate_channels(struct ioatdma_device *ioat_dma)
567 struct device *dev = &ioat_dma->pdev->dev;
568 struct dma_device *dma = &ioat_dma->dma_dev;
574 chancnt = readb(ioat_dma->reg_base + IOAT_CHANCNT_OFFSET);
576 if (chancnt > ARRAY_SIZE(ioat_dma->idx)) {
578 chancnt, ARRAY_SIZE(ioat_dma->idx));
579 chancnt = ARRAY_SIZE(ioat_dma->idx);
581 xfercap_log = readb(ioat_dma->reg_base + IOAT_XFERCAP_OFFSET);
592 ioat_init_channel(ioat_dma, ioat_chan, i);
600 ioat_dma->chancnt = i;
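
Lines 562-600 are ioat_enumerate_channels(): the hardware channel count is read from IOAT_CHANCNT_OFFSET but clamped to the driver's idx[] table, the transfer-capability exponent is read once from IOAT_XFERCAP_OFFSET, and whatever index the init loop reaches becomes chancnt. In the sketch below, the clamp and register reads follow the fragments verbatim, while the per-channel allocation and the xfercap_log member are assumptions.

static void enumerate_sketch(struct ioatdma_device *ioat_dma)
{
	struct device *dev = &ioat_dma->pdev->dev;
	struct ioatdma_chan *ioat_chan;
	u8 xfercap_log;
	int chancnt, i;

	chancnt = readb(ioat_dma->reg_base + IOAT_CHANCNT_OFFSET);
	if (chancnt > ARRAY_SIZE(ioat_dma->idx)) {
		dev_warn(dev, "(%d) exceeds max supported channels (%zu)\n",
			 chancnt, ARRAY_SIZE(ioat_dma->idx));
		chancnt = ARRAY_SIZE(ioat_dma->idx);
	}

	xfercap_log = readb(ioat_dma->reg_base + IOAT_XFERCAP_OFFSET);

	for (i = 0; i < chancnt; i++) {
		ioat_chan = kzalloc(sizeof(*ioat_chan), GFP_KERNEL);
		if (!ioat_chan)
			break;
		ioat_init_channel(ioat_dma, ioat_chan, i);
		ioat_chan->xfercap_log = xfercap_log;
	}
	ioat_dma->chancnt = i;	/* only count what actually initialized */
}
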
610 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
628 if (ioat_dma->version >= IOAT_VER_3_4)
665 dma_pool_free(ioat_dma->completion_pool, ioat_chan->completion,
697 dma_pool_zalloc(ioat_chan->ioat_dma->completion_pool,
724 if (ioat_chan->ioat_dma->version >= IOAT_VER_3_4) {
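
Lines 610-724 come from the per-channel resource alloc/free paths. The completion_pool created in ioat_probe() hands each channel a small completion writeback buffer, returned on teardown, and the IOAT_VER_3_4 checks gate extra status handling on newer silicon. A sketch of the alloc/free pairing, grounded in the fragments at lines 665 and 697; completion_dma as the bus-address member and GFP_NOWAIT are assumptions.

static int alloc_completion_sketch(struct ioatdma_chan *ioat_chan)
{
	ioat_chan->completion =
		dma_pool_zalloc(ioat_chan->ioat_dma->completion_pool,
				GFP_NOWAIT, &ioat_chan->completion_dma);
	if (!ioat_chan->completion)
		return -ENOMEM;
	return 0;
}

static void free_completion_sketch(struct ioatdma_chan *ioat_chan)
{
	struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;

	dma_pool_free(ioat_dma->completion_pool, ioat_chan->completion,
		      ioat_chan->completion_dma);
	ioat_chan->completion = NULL;
}
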
766 ioat_init_channel(struct ioatdma_device *ioat_dma,
769 struct dma_device *dma = &ioat_dma->dma_dev;
771 ioat_chan->ioat_dma = ioat_dma;
772 ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1));
777 ioat_dma->idx[idx] = ioat_chan;
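
Lines 766-777 are the core of ioat_init_channel(), and the reg_base arithmetic is the interesting part: bank 0 of the MMIO window holds the device-global registers, each channel's 0x80-byte register bank follows it (hence the idx + 1 stride), and idx[] gives the reverse mapping used by ioat_chan_by_index() in the interrupt setup above. A sketch of just that wiring:

static void init_channel_sketch(struct ioatdma_device *ioat_dma,
				struct ioatdma_chan *ioat_chan, int idx)
{
	ioat_chan->ioat_dma = ioat_dma;
	ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1));
	ioat_dma->idx[idx] = ioat_chan;	/* reverse lookup for IRQ dispatch */
}
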
783 static int ioat_xor_val_self_test(struct ioatdma_device *ioat_dma)
800 struct device *dev = &ioat_dma->pdev->dev;
801 struct dma_device *dma = &ioat_dma->dma_dev;
1048 static int ioat3_dma_self_test(struct ioatdma_device *ioat_dma)
1052 rc = ioat_dma_self_test(ioat_dma);
1056 rc = ioat_xor_val_self_test(ioat_dma);
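
Lines 1048-1056 show ioat3_dma_self_test() to be a thin composite: the basic memcpy test first, then the XOR/PQ-validate test defined at line 783. Reconstructed from the fragments:

static int self_test_sketch(struct ioatdma_device *ioat_dma)
{
	int rc;

	rc = ioat_dma_self_test(ioat_dma);	/* memcpy path, line 299 */
	if (rc)
		return rc;

	return ioat_xor_val_self_test(ioat_dma); /* XOR/PQ path, line 783 */
}
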
1061 static void ioat_intr_quirk(struct ioatdma_device *ioat_dma)
1068 dma = &ioat_dma->dma_dev;
1074 if (ioat_dma->cap & IOAT_CAP_DWBES) {
1087 static int ioat3_dma_probe(struct ioatdma_device *ioat_dma, int dca)
1089 struct pci_dev *pdev = ioat_dma->pdev;
1097 dma = &ioat_dma->dma_dev;
1106 ioat_dma->cap = readl(ioat_dma->reg_base + IOAT_DMA_CAP_OFFSET);
1109 ioat_dma->cap &=
1113 if (dca_en && (ioat_dma->cap & (IOAT_CAP_XOR|IOAT_CAP_PQ)))
1114 ioat_dma->cap &= ~(IOAT_CAP_XOR|IOAT_CAP_PQ);
1116 if (ioat_dma->cap & IOAT_CAP_XOR) {
1126 if (ioat_dma->cap & IOAT_CAP_PQ) {
1133 if (ioat_dma->cap & IOAT_CAP_RAID16SS)
1138 if (!(ioat_dma->cap & IOAT_CAP_XOR)) {
1144 if (ioat_dma->cap & IOAT_CAP_RAID16SS)
1154 if (ioat_dma->cap & IOAT_CAP_RAID16SS) {
1162 ioat_dma->sed_hw_pool[i] = dmam_pool_create(pool_name,
1165 if (!ioat_dma->sed_hw_pool[i])
1171 if (!(ioat_dma->cap & (IOAT_CAP_XOR | IOAT_CAP_PQ)))
1174 err = ioat_probe(ioat_dma);
1184 err = ioat_register(ioat_dma);
1188 ioat_kobject_add(ioat_dma, &ioat_ktype);
1191 ioat_dma->dca = ioat_dca_init(pdev, ioat_dma->reg_base);
1204 if (ioat_dma->cap & IOAT_CAP_DPS)
1206 ioat_dma->reg_base + IOAT_PREFETCH_LIMIT_OFFSET);
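
Lines 1087-1206 are ioat3_dma_probe(), where every optional feature is gated on a single read of the capability register: XOR/PQ are masked off when DCA is in use, IOAT_CAP_RAID16SS switches descriptor limits and allocates the extended-descriptor pools (sed_hw_pool), and IOAT_CAP_DPS programs a prefetch limit after registration. A sketch of the capability-masking portion; the readl and the DCA mask follow the fragments, while the dma_cap_set() calls are assumptions based on the standard dmaengine pattern.

static void cap_setup_sketch(struct ioatdma_device *ioat_dma, int dca_en)
{
	struct dma_device *dma = &ioat_dma->dma_dev;

	ioat_dma->cap = readl(ioat_dma->reg_base + IOAT_DMA_CAP_OFFSET);

	/* DCA and the RAID engines are mutually exclusive */
	if (dca_en && (ioat_dma->cap & (IOAT_CAP_XOR | IOAT_CAP_PQ)))
		ioat_dma->cap &= ~(IOAT_CAP_XOR | IOAT_CAP_PQ);

	if (ioat_dma->cap & IOAT_CAP_XOR) {
		dma_cap_set(DMA_XOR, dma->cap_mask);
		dma_cap_set(DMA_XOR_VAL, dma->cap_mask);
	}
	if (ioat_dma->cap & IOAT_CAP_PQ) {
		dma_cap_set(DMA_PQ, dma->cap_mask);
		dma_cap_set(DMA_PQ_VAL, dma->cap_mask);
	}
}
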
1213 struct ioatdma_device *ioat_dma = pci_get_drvdata(pdev);
1217 if (!ioat_dma)
1221 ioat_chan = ioat_dma->idx[i];
1240 ioat_disable_interrupts(ioat_dma);
1243 static void ioat_resume(struct ioatdma_device *ioat_dma)
1250 ioat_chan = ioat_dma->idx[i];
1300 struct ioatdma_device *ioat_dma = pci_get_drvdata(pdev);
1305 ioat_resume(ioat_dma);
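
The tail of the listing (lines 1213-1305) is the shutdown, suspend/resume, and PCI error-recovery plumbing: both the shutdown path and ioat_resume() walk ioat_dma->idx[], and the error-recovery callback at line 1300 reuses ioat_resume(). A sketch of that walk, bounded by the same idx[] table the enumeration clamp used; the per-channel restart is elided.

static void resume_sketch(struct ioatdma_device *ioat_dma)
{
	struct ioatdma_chan *ioat_chan;
	int i;

	for (i = 0; i < ARRAY_SIZE(ioat_dma->idx); i++) {
		ioat_chan = ioat_dma->idx[i];
		if (!ioat_chan)
			continue;
		/* re-arm channel registers and restart the ring here */
	}
}
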