Lines matching defs:ioat_chan (occurrences of the ioat_chan symbol in the Linux ioatdma DMA-engine driver; the number at the start of each line is its line number in the driver source)

121 		  struct ioatdma_chan *ioat_chan, int idx);
405 struct ioatdma_chan *ioat_chan;
434 ioat_chan = ioat_chan_by_index(ioat_dma, i);
437 "ioat-msix", ioat_chan);
441 ioat_chan = ioat_chan_by_index(ioat_dma, j);
442 devm_free_irq(dev, msix->vector, ioat_chan);
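Lines 434-442 sit in the MSI-X branch of interrupt setup: one vector is requested per channel, and if any request fails the loop unwinds, releasing every vector already taken before falling back to a lesser interrupt mode. A minimal sketch of that request-and-rollback pattern; the handler name ioat_dma_do_interrupt_msix and the msix_entries array are assumptions, not shown in the listing:

	/* request one MSI-X vector per channel, rolling back on failure */
	for (i = 0; i < msixcnt; i++) {
		msix = &ioat_dma->msix_entries[i];
		ioat_chan = ioat_chan_by_index(ioat_dma, i);
		err = devm_request_irq(dev, msix->vector,
				       ioat_dma_do_interrupt_msix, 0,
				       "ioat-msix", ioat_chan);
		if (err) {
			/* unwind: free the vectors already requested */
			for (j = 0; j < i; j++) {
				msix = &ioat_dma->msix_entries[j];
				ioat_chan = ioat_chan_by_index(ioat_dma, j);
				devm_free_irq(dev, msix->vector, ioat_chan);
			}
			goto msi;	/* fall back to plain MSI */
		}
	}

Passing ioat_chan as the dev_id cookie is what lets devm_free_irq() tear down exactly the per-channel registration.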
567 struct ioatdma_chan *ioat_chan;
588 ioat_chan = kzalloc(sizeof(*ioat_chan), GFP_KERNEL);
589 if (!ioat_chan)
592 ioat_init_channel(ioat_dma, ioat_chan, i);
593 ioat_chan->xfercap_log = xfercap_log;
594 spin_lock_init(&ioat_chan->prep_lock);
595 if (ioat_reset_hw(ioat_chan)) {
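Lines 588-595 are the per-channel body of channel enumeration: each ioatdma_chan is zero-allocated, wired into the dmaengine core via ioat_init_channel(), given its transfer-size cap, and reset once before use. A condensed sketch, with the surrounding loop and the xfercap_log computation assumed:

	for (i = 0; i < dma->chancnt; i++) {
		ioat_chan = kzalloc(sizeof(*ioat_chan), GFP_KERNEL);
		if (!ioat_chan)
			break;		/* stop at the first allocation failure */

		ioat_init_channel(ioat_dma, ioat_chan, i);
		ioat_chan->xfercap_log = xfercap_log;	/* log2 of max transfer size */
		spin_lock_init(&ioat_chan->prep_lock);
		if (ioat_reset_hw(ioat_chan)) {
			i = 0;		/* a failed reset leaves no usable channels */
			break;
		}
	}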
609 struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
610 struct ioatdma_device *ioat_dma = ioat_chan->ioat_dma;
612 const int total_descs = 1 << ioat_chan->alloc_order;
619 if (!ioat_chan->ring)
622 ioat_stop(ioat_chan);
624 if (!test_bit(IOAT_CHAN_DOWN, &ioat_chan->state)) {
625 ioat_reset_hw(ioat_chan);
630 ioat_chan->reg_base +
634 spin_lock_bh(&ioat_chan->cleanup_lock);
635 spin_lock_bh(&ioat_chan->prep_lock);
636 descs = ioat_ring_space(ioat_chan);
637 dev_dbg(to_dev(ioat_chan), "freeing %d idle descriptors\n", descs);
639 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->head + i);
644 dev_err(to_dev(ioat_chan), "Freeing %d in use descriptors!\n",
648 desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail + i);
649 dump_desc_dbg(ioat_chan, desc);
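Lines 609-649 are the channel-resource teardown (ioat_free_chan_resources() upstream): the channel is stopped, reset unless already marked down, and then, with both per-channel locks held, the ring is drained from head (idle descriptors) and from tail (descriptors still in flight, which earns the dev_err). A sketch of the two drain loops; ioat_free_ring_ent() is assumed as the per-descriptor release helper:

	descs = ioat_ring_space(ioat_chan);
	dev_dbg(to_dev(ioat_chan), "freeing %d idle descriptors\n", descs);
	for (i = 0; i < descs; i++) {
		desc = ioat_get_ring_ent(ioat_chan, ioat_chan->head + i);
		ioat_free_ring_ent(desc, c);
	}

	if (descs < total_descs)
		dev_err(to_dev(ioat_chan), "Freeing %d in use descriptors!\n",
			total_descs - descs);

	for (i = 0; i < total_descs - descs; i++) {
		desc = ioat_get_ring_ent(ioat_chan, ioat_chan->tail + i);
		dump_desc_dbg(ioat_chan, desc);
		ioat_free_ring_ent(desc, c);
	}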
653 for (i = 0; i < ioat_chan->desc_chunks; i++) {
654 dma_free_coherent(to_dev(ioat_chan), IOAT_CHUNK_SIZE,
655 ioat_chan->descs[i].virt,
656 ioat_chan->descs[i].hw);
657 ioat_chan->descs[i].virt = NULL;
658 ioat_chan->descs[i].hw = 0;
660 ioat_chan->desc_chunks = 0;
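The loop at 653-658 returns the ring's backing memory: hardware descriptors live in a small number of large DMA-coherent chunks of IOAT_CHUNK_SIZE each, rather than one allocation per descriptor. For contrast, a sketch of the matching allocation side, assuming a struct ioat_descs with the virt/hw members the lines above imply:

	/* allocation-side counterpart (sketch): one coherent block per chunk */
	for (i = 0; i < chunks; i++) {
		struct ioat_descs *descs = &ioat_chan->descs[i];

		descs->virt = dma_alloc_coherent(to_dev(ioat_chan),
						 IOAT_CHUNK_SIZE,
						 &descs->hw, flags);
		if (!descs->virt) {
			/* unwind chunks 0..i-1 with dma_free_coherent() */
			break;
		}
	}
	ioat_chan->desc_chunks = i;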
662 kfree(ioat_chan->ring);
663 ioat_chan->ring = NULL;
664 ioat_chan->alloc_order = 0;
665 dma_pool_free(ioat_dma->completion_pool, ioat_chan->completion,
666 ioat_chan->completion_dma);
667 spin_unlock_bh(&ioat_chan->prep_lock);
668 spin_unlock_bh(&ioat_chan->cleanup_lock);
670 ioat_chan->last_completion = 0;
671 ioat_chan->completion_dma = 0;
672 ioat_chan->dmacount = 0;
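Lines 662-672 finish the teardown: the ring pointer array is kfree'd, the completion write-back slot goes back to its pool, the counters are zeroed, and only then are prep_lock and cleanup_lock dropped. The pool itself is device-global; a sketch of where it plausibly comes from at probe time (name and sizing are assumptions based on the driver's conventions, not shown in this listing):

	/* probe-time counterpart (assumption): one u64 write-back slot per channel */
	ioat_dma->completion_pool = dma_pool_create("completion_pool", dev,
						    sizeof(u64),
						    SMP_CACHE_BYTES,
						    SMP_CACHE_BYTES);
	if (!ioat_dma->completion_pool)
		return -ENOMEM;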
680 struct ioatdma_chan *ioat_chan = to_ioat_chan(c);
688 if (ioat_chan->ring)
689 return 1 << ioat_chan->alloc_order;
692 writew(IOAT_CHANCTRL_RUN, ioat_chan->reg_base + IOAT_CHANCTRL_OFFSET);
696 ioat_chan->completion =
697 dma_pool_zalloc(ioat_chan->ioat_dma->completion_pool,
698 GFP_NOWAIT, &ioat_chan->completion_dma);
699 if (!ioat_chan->completion)
702 writel(((u64)ioat_chan->completion_dma) & 0x00000000FFFFFFFF,
703 ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_LOW);
704 writel(((u64)ioat_chan->completion_dma) >> 32,
705 ioat_chan->reg_base + IOAT_CHANCMP_OFFSET_HIGH);
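Lines 692-705 re-arm the channel and give it a completion write-back area. Note the address programming at 702-705: the 64-bit DMA address is split into two 32-bit register writes (low word, then high word), presumably because a single 64-bit MMIO write is not usable on this hardware. Between this and the ring install at 712, the function must also allocate the ring itself; a sketch of that step, with IOAT_MAX_ORDER assumed as the default ring order:

	/* allocate the descriptor ring before publishing it (sketch) */
	order = IOAT_MAX_ORDER;
	ring = ioat_alloc_ring(c, order, GFP_NOWAIT);
	if (!ring)
		return -ENOMEM;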
712 spin_lock_bh(&ioat_chan->cleanup_lock);
713 spin_lock_bh(&ioat_chan->prep_lock);
714 ioat_chan->ring = ring;
715 ioat_chan->head = 0;
716 ioat_chan->issued = 0;
717 ioat_chan->tail = 0;
718 ioat_chan->alloc_order = order;
719 set_bit(IOAT_RUN, &ioat_chan->state);
720 spin_unlock_bh(&ioat_chan->prep_lock);
721 spin_unlock_bh(&ioat_chan->cleanup_lock);
724 if (ioat_chan->ioat_dma->version >= IOAT_VER_3_4) {
730 writel(lat_val, ioat_chan->reg_base +
736 writel(lat_val, ioat_chan->reg_base +
741 ioat_chan->reg_base +
745 ioat_start_null_desc(ioat_chan);
750 status = ioat_chansts(ioat_chan);
754 return 1 << ioat_chan->alloc_order;
756 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
758 dev_WARN(to_dev(ioat_chan),
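After the version-gated latency-tolerance writes at 724-741 (IOAT 3.4 and later only), lines 745-758 start the channel with a null descriptor and poll briefly for it to leave the halted state; success returns the ring size, failure dumps CHANERR and tears the channel back down. A sketch of that startup check, assuming the is_ioat_active()/is_ioat_idle() status helpers:

	ioat_start_null_desc(ioat_chan);

	/* give the channel a moment to come up */
	do {
		udelay(1);
		status = ioat_chansts(ioat_chan);
	} while (i++ < 20 && !is_ioat_active(status) && !is_ioat_idle(status));

	if (is_ioat_active(status) || is_ioat_idle(status))
		return 1 << ioat_chan->alloc_order;

	chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
	dev_WARN(to_dev(ioat_chan),
		 "failed to start channel chanerr: %#x\n", chanerr);
	ioat_free_chan_resources(c);
	return -EFAULT;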
767 struct ioatdma_chan *ioat_chan, int idx)
771 ioat_chan->ioat_dma = ioat_dma;
772 ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1));
773 spin_lock_init(&ioat_chan->cleanup_lock);
774 ioat_chan->dma_chan.device = dma;
775 dma_cookie_init(&ioat_chan->dma_chan);
776 list_add_tail(&ioat_chan->dma_chan.device_node, &dma->channels);
777 ioat_dma->idx[idx] = ioat_chan;
778 timer_setup(&ioat_chan->timer, ioat_timer_event, 0);
779 tasklet_setup(&ioat_chan->cleanup_task, ioat_cleanup_event);
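Lines 767-779 are nearly the whole of ioat_init_channel() (declared at 121), assembled here into one piece for readability; the only assumption is the local dma pointer into the device's dma_dev. The 0x80 * (idx + 1) offset reflects the MMIO layout: an 0x80-byte common register block at the base, then one 0x80-byte block per channel.

	static void ioat_init_channel(struct ioatdma_device *ioat_dma,
				      struct ioatdma_chan *ioat_chan, int idx)
	{
		struct dma_device *dma = &ioat_dma->dma_dev;

		ioat_chan->ioat_dma = ioat_dma;
		ioat_chan->reg_base = ioat_dma->reg_base + (0x80 * (idx + 1));
		spin_lock_init(&ioat_chan->cleanup_lock);
		ioat_chan->dma_chan.device = dma;
		dma_cookie_init(&ioat_chan->dma_chan);
		list_add_tail(&ioat_chan->dma_chan.device_node, &dma->channels);
		ioat_dma->idx[idx] = ioat_chan;
		timer_setup(&ioat_chan->timer, ioat_timer_event, 0);
		tasklet_setup(&ioat_chan->cleanup_task, ioat_cleanup_event);
	}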
1065 struct ioatdma_chan *ioat_chan;
1076 ioat_chan = to_ioat_chan(c);
1077 errmask = readl(ioat_chan->reg_base +
1081 writel(errmask, ioat_chan->reg_base +
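Lines 1076-1081 are a per-channel read-modify-write of the channel error mask. In the upstream driver this lives in an interrupt quirk that, on hardware reporting descriptor write-back error status, masks the XOR P/Q validation errors; the capability gate and constants below are assumed from that context:

	if (ioat_dma->cap & IOAT_CAP_DWBES) {
		list_for_each_entry(c, &dma->channels, device_node) {
			ioat_chan = to_ioat_chan(c);
			errmask = readl(ioat_chan->reg_base +
					IOAT_CHANERR_MASK_OFFSET);
			errmask |= IOAT_CHANERR_XOR_P_OR_CRC_ERR |
				   IOAT_CHANERR_XOR_Q_ERR;
			writel(errmask, ioat_chan->reg_base +
					IOAT_CHANERR_MASK_OFFSET);
		}
	}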
1093 struct ioatdma_chan *ioat_chan;
1179 ioat_chan = to_ioat_chan(c);
1181 ioat_chan->reg_base + IOAT_DCACTRL_OFFSET);
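Line 1181 writes the per-channel DCA control register. A sketch of the loop it sits in, assuming the IOAT_DMA_DCA_ANY_CPU value the upstream driver uses, which lets direct cache access target whichever CPU consumes the data:

	list_for_each_entry(c, &dma->channels, device_node) {
		ioat_chan = to_ioat_chan(c);
		writel(IOAT_DMA_DCA_ANY_CPU,
		       ioat_chan->reg_base + IOAT_DCACTRL_OFFSET);
	}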
1214 struct ioatdma_chan *ioat_chan;
1221 ioat_chan = ioat_dma->idx[i];
1222 if (!ioat_chan)
1225 spin_lock_bh(&ioat_chan->prep_lock);
1226 set_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
1227 spin_unlock_bh(&ioat_chan->prep_lock);
1234 del_timer_sync(&ioat_chan->timer);
1237 ioat_reset_hw(ioat_chan);
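Lines 1214-1237 are the shutdown path: every channel is flagged IOAT_CHAN_DOWN under prep_lock, its timer is synchronously stopped, and the hardware is reset. A sketch of the loop; note that del_timer_sync() must run with prep_lock dropped, since the timer callback itself takes that lock:

	for (i = 0; i < IOAT_MAX_CHANS; i++) {
		ioat_chan = ioat_dma->idx[i];
		if (!ioat_chan)
			continue;

		spin_lock_bh(&ioat_chan->prep_lock);
		set_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
		spin_unlock_bh(&ioat_chan->prep_lock);

		/* the timer handler takes prep_lock, so sync it unlocked */
		del_timer_sync(&ioat_chan->timer);

		/* quiesce and reset the hardware */
		ioat_reset_hw(ioat_chan);
	}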
1245 struct ioatdma_chan *ioat_chan;
1250 ioat_chan = ioat_dma->idx[i];
1251 if (!ioat_chan)
1254 spin_lock_bh(&ioat_chan->prep_lock);
1255 clear_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
1256 spin_unlock_bh(&ioat_chan->prep_lock);
1258 chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
1259 writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
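Lines 1245-1259 mirror shutdown on resume: the IOAT_CHAN_DOWN bit is cleared under prep_lock, and any latched channel errors are acknowledged. CHANERR is write-one-to-clear, which is why the value just read is immediately written back:

	for (i = 0; i < IOAT_MAX_CHANS; i++) {
		ioat_chan = ioat_dma->idx[i];
		if (!ioat_chan)
			continue;

		spin_lock_bh(&ioat_chan->prep_lock);
		clear_bit(IOAT_CHAN_DOWN, &ioat_chan->state);
		spin_unlock_bh(&ioat_chan->prep_lock);

		/* CHANERR is W1C: writing back the read value clears it */
		chanerr = readl(ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
		writel(chanerr, ioat_chan->reg_base + IOAT_CHANERR_OFFSET);
	}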