Lines matching defs:adev — references to struct ppc440spe_adma_device *adev in the ppc440spe ADMA driver (drivers/dma/ppc4xx/adma.c)
53 [PPC_ADMA_INIT_ALLOC] = "failed to allocate memory for adev "
3770 static void ppc440spe_adma_init_capabilities(struct ppc440spe_adma_device *adev)
3772 switch (adev->id) {
3775 dma_cap_set(DMA_MEMCPY, adev->common.cap_mask);
3776 dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask);
3777 dma_cap_set(DMA_PQ, adev->common.cap_mask);
3778 dma_cap_set(DMA_PQ_VAL, adev->common.cap_mask);
3779 dma_cap_set(DMA_XOR_VAL, adev->common.cap_mask);
3782 dma_cap_set(DMA_XOR, adev->common.cap_mask);
3783 dma_cap_set(DMA_PQ, adev->common.cap_mask);
3784 dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask);
3785 adev->common.cap_mask = adev->common.cap_mask;
3790 adev->common.device_alloc_chan_resources =
3792 adev->common.device_free_chan_resources =
3794 adev->common.device_tx_status = ppc440spe_adma_tx_status;
3795 adev->common.device_issue_pending = ppc440spe_adma_issue_pending;
3798 if (dma_has_cap(DMA_MEMCPY, adev->common.cap_mask)) {
3799 adev->common.device_prep_dma_memcpy =
3802 if (dma_has_cap(DMA_XOR, adev->common.cap_mask)) {
3803 adev->common.max_xor = XOR_MAX_OPS;
3804 adev->common.device_prep_dma_xor =
3807 if (dma_has_cap(DMA_PQ, adev->common.cap_mask)) {
3808 switch (adev->id) {
3810 dma_set_maxpq(&adev->common,
3814 dma_set_maxpq(&adev->common,
3818 adev->common.max_pq = XOR_MAX_OPS * 3;
3821 adev->common.device_prep_dma_pq =
3824 if (dma_has_cap(DMA_PQ_VAL, adev->common.cap_mask)) {
3825 switch (adev->id) {
3827 adev->common.max_pq = DMA0_FIFO_SIZE /
3831 adev->common.max_pq = DMA1_FIFO_SIZE /
3835 adev->common.device_prep_dma_pq_val =
3838 if (dma_has_cap(DMA_XOR_VAL, adev->common.cap_mask)) {
3839 switch (adev->id) {
3841 adev->common.max_xor = DMA0_FIFO_SIZE /
3845 adev->common.max_xor = DMA1_FIFO_SIZE /
3849 adev->common.device_prep_dma_xor_val =
3852 if (dma_has_cap(DMA_INTERRUPT, adev->common.cap_mask)) {
3853 adev->common.device_prep_dma_interrupt =
3858 dev_name(adev->dev),
3859 dma_has_cap(DMA_PQ, adev->common.cap_mask) ? "pq " : "",
3860 dma_has_cap(DMA_PQ_VAL, adev->common.cap_mask) ? "pq_val " : "",
3861 dma_has_cap(DMA_XOR, adev->common.cap_mask) ? "xor " : "",
3862 dma_has_cap(DMA_XOR_VAL, adev->common.cap_mask) ? "xor_val " : "",
3863 dma_has_cap(DMA_MEMCPY, adev->common.cap_mask) ? "memcpy " : "",
3864 dma_has_cap(DMA_INTERRUPT, adev->common.cap_mask) ? "intr " : "");
3867 static int ppc440spe_adma_setup_irqs(struct ppc440spe_adma_device *adev,
3875 ofdev = container_of(adev->dev, struct platform_device, dev);
3877 if (adev->id != PPC440SPE_XOR_ID) {
3878 adev->err_irq = irq_of_parse_and_map(np, 1);
3879 if (!adev->err_irq) {
3880 dev_warn(adev->dev, "no err irq resource?\n");
3882 adev->err_irq = -ENXIO;
3886 adev->err_irq = -ENXIO;
3889 adev->irq = irq_of_parse_and_map(np, 0);
3890 if (!adev->irq) {
3891 dev_err(adev->dev, "no irq resource\n");
3896 dev_dbg(adev->dev, "irq %d, err irq %d\n",
3897 adev->irq, adev->err_irq);
3899 ret = request_irq(adev->irq, ppc440spe_adma_eot_handler,
3900 0, dev_driver_string(adev->dev), chan);
3902 dev_err(adev->dev, "can't request irq %d\n",
3903 adev->irq);
3912 if (adev->err_irq > 0) {
3914 ret = request_irq(adev->err_irq,
3917 dev_driver_string(adev->dev),
3920 dev_err(adev->dev, "can't request irq %d\n",
3921 adev->err_irq);
3928 if (adev->id == PPC440SPE_XOR_ID) {
3932 &adev->xor_reg->ier);
3943 adev->i2o_reg = of_iomap(np, 0);
3944 if (!adev->i2o_reg) {
3954 enable = (adev->id == PPC440SPE_DMA0_ID) ?
3957 mask = ioread32(&adev->i2o_reg->iopim) & enable;
3958 iowrite32(mask, &adev->i2o_reg->iopim);
3963 free_irq(adev->irq, chan);
3965 irq_dispose_mapping(adev->irq);
3967 if (adev->err_irq > 0) {
3969 irq_dispose_mapping(adev->err_irq);
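The matches from 3867 to 3969 cover ppc440spe_adma_setup_irqs(): the platform device is recovered from adev->dev with container_of(), interrupt 0 of the OF node is mapped and requested as the end-of-transfer interrupt, the DMA engines (but not the XOR core) additionally map interrupt 1 as a shared error interrupt, and the per-engine interrupt-enable bits are then programmed (xor_reg->ier for the XOR core, the I2O iopim mask for DMA0/DMA1). A trimmed reconstruction of the main path; the error-IRQ sharing, the initcode bookkeeping and the mask programming are reduced to comments and should be treated as assumptions:

    /* Sketch; relies on <linux/of_irq.h> and <linux/interrupt.h>. */
    static int ppc440spe_adma_setup_irqs(struct ppc440spe_adma_device *adev,
                                         struct ppc440spe_adma_chan *chan,
                                         int *initcode)
    {
        struct platform_device *ofdev;
        struct device_node *np;
        int ret;

        /* *initcode reports a PPC_ADMA_INIT_* status to probe; omitted here. */
        ofdev = container_of(adev->dev, struct platform_device, dev);
        np = ofdev->dev.of_node;

        /* DMA0/DMA1 have a second, shared error interrupt; the XOR core
         * reports errors through its main interrupt instead. */
        if (adev->id != PPC440SPE_XOR_ID) {
            adev->err_irq = irq_of_parse_and_map(np, 1);
            if (!adev->err_irq) {
                dev_warn(adev->dev, "no err irq resource?\n");
                adev->err_irq = -ENXIO;    /* keep going without it */
            }
        } else {
            adev->err_irq = -ENXIO;
        }

        adev->irq = irq_of_parse_and_map(np, 0);
        if (!adev->irq) {
            dev_err(adev->dev, "no irq resource\n");
            return -ENXIO;
        }
        dev_dbg(adev->dev, "irq %d, err irq %d\n", adev->irq, adev->err_irq);

        ret = request_irq(adev->irq, ppc440spe_adma_eot_handler,
                          0, dev_driver_string(adev->dev), chan);
        if (ret) {
            dev_err(adev->dev, "can't request irq %d\n", adev->irq);
            irq_dispose_mapping(adev->irq);
            return ret;
        }

        /* Requesting the shared error IRQ, enabling the XOR IER bits and
         * unmasking the I2O iopim bits follow here; see matches 3912-3958. */
        return 0;
    }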
3974 static void ppc440spe_adma_release_irqs(struct ppc440spe_adma_device *adev,
3979 if (adev->id == PPC440SPE_XOR_ID) {
3981 mask = ioread32be(&adev->xor_reg->ier);
3984 iowrite32be(mask, &adev->xor_reg->ier);
3987 disable = (adev->id == PPC440SPE_DMA0_ID) ?
3990 mask = ioread32(&adev->i2o_reg->iopim) | disable;
3991 iowrite32(mask, &adev->i2o_reg->iopim);
3993 free_irq(adev->irq, chan);
3994 irq_dispose_mapping(adev->irq);
3995 if (adev->err_irq > 0) {
3996 free_irq(adev->err_irq, chan);
3998 irq_dispose_mapping(adev->err_irq);
3999 iounmap(adev->i2o_reg);
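Matches 3974-3999 are the mirror image, ppc440spe_adma_release_irqs(): mask the interrupt sources back off (clear bits in xor_reg->ier for the XOR core, set the P0/P1 bits in i2o_reg->iopim for DMA0/DMA1), free and dispose the main IRQ, and, for the engines that had one, release the shared error IRQ and unmap the I2O register block. A compact sketch; the XOR IER and I2O IOPIM bit names, and the refcounting of the shared error IRQ, are assumptions:

    /* Sketch; relies on <linux/io.h> and <linux/interrupt.h>. */
    static void ppc440spe_adma_release_irqs(struct ppc440spe_adma_device *adev,
                                            struct ppc440spe_adma_chan *chan)
    {
        u32 mask;

        if (adev->id == PPC440SPE_XOR_ID) {
            /* Disable the XOR engine interrupt sources (big-endian MMIO). */
            mask = ioread32be(&adev->xor_reg->ier);
            mask &= ~XOR_IE_BITS;              /* placeholder for the real bits */
            iowrite32be(mask, &adev->xor_reg->ier);
        } else {
            /* Re-mask this channel's bits in the shared I2O controller. */
            u32 disable = (adev->id == PPC440SPE_DMA0_ID) ?
                          (I2O_IOPIM_P0SNE | I2O_IOPIM_P0EM) :
                          (I2O_IOPIM_P1SNE | I2O_IOPIM_P1EM);
            mask = ioread32(&adev->i2o_reg->iopim) | disable;
            iowrite32(mask, &adev->i2o_reg->iopim);
        }

        free_irq(adev->irq, chan);
        irq_dispose_mapping(adev->irq);

        if (adev->err_irq > 0) {
            free_irq(adev->err_irq, chan);
            /* In the full driver the shared error IRQ and the I2O mapping are
             * refcounted; only the last channel disposes and unmaps them. */
            irq_dispose_mapping(adev->err_irq);
            iounmap(adev->i2o_reg);
        }
    }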
4011 struct ppc440spe_adma_device *adev;
4070 adev = kzalloc(sizeof(*adev), GFP_KERNEL);
4071 if (!adev) {
4077 adev->id = id;
4078 adev->pool_size = pool_size;
4080 adev->dma_desc_pool_virt = dma_alloc_coherent(&ofdev->dev,
4081 adev->pool_size, &adev->dma_desc_pool,
4083 if (adev->dma_desc_pool_virt == NULL) {
4086 adev->pool_size);
4092 adev->dma_desc_pool_virt, (u64)adev->dma_desc_pool);
4101 if (adev->id == PPC440SPE_XOR_ID) {
4102 adev->xor_reg = regs;
4104 iowrite32be(XOR_CRSR_XASR_BIT, &adev->xor_reg->crsr);
4105 iowrite32be(XOR_CRSR_64BA_BIT, &adev->xor_reg->crrr);
4107 size_t fifo_size = (adev->id == PPC440SPE_DMA0_ID) ?
4109 adev->dma_reg = regs;
4116 &adev->dma_reg->fsiz);
4119 &adev->dma_reg->cfg);
4121 iowrite32(~0, &adev->dma_reg->dsts);
4124 adev->dev = &ofdev->dev;
4125 adev->common.dev = &ofdev->dev;
4126 INIT_LIST_HEAD(&adev->common.channels);
4127 platform_set_drvdata(ofdev, adev);
4140 chan->device = adev;
4141 chan->common.device = &adev->common;
4143 list_add_tail(&chan->common.device_node, &adev->common.channels);
4149 if (adev->id != PPC440SPE_XOR_ID) {
4178 ret = ppc440spe_adma_setup_irqs(adev, chan, &initcode);
4182 ppc440spe_adma_init_capabilities(adev);
4184 ret = dma_async_device_register(&adev->common);
4194 ppc440spe_adma_release_irqs(adev, chan);
4203 if (adev->id != PPC440SPE_XOR_ID) {
4214 if (adev->id == PPC440SPE_XOR_ID)
4215 iounmap(adev->xor_reg);
4217 iounmap(adev->dma_reg);
4219 dma_free_coherent(adev->dev, adev->pool_size,
4220 adev->dma_desc_pool_virt,
4221 adev->dma_desc_pool);
4223 kfree(adev);
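The probe-side matches at 4011-4223 give the bring-up order: allocate the adev, carve out a coherent descriptor pool, map and reset/configure the engine registers, attach the single channel to adev->common.channels, hook the IRQs, fill in the capabilities, and finally dma_async_device_register(); the error path at 4194-4223 unwinds in reverse (release IRQs, unmap registers, free the pool, kfree the adev). A trimmed happy-path sketch, with OF parsing, register mapping and per-channel initialization reduced to comments; those parts, and the exact unwind labels, are assumptions:

    /* Sketch; relies on <linux/platform_device.h> and <linux/dma-mapping.h>. */
    static int ppc440spe_adma_probe(struct platform_device *ofdev)
    {
        struct ppc440spe_adma_device *adev;
        struct ppc440spe_adma_chan *chan;
        void __iomem *regs;
        int id, pool_size, ret;

        /* Assume id, pool_size and regs were already derived from the OF
         * node and its memory resource (request_mem_region/ioremap). */

        adev = kzalloc(sizeof(*adev), GFP_KERNEL);
        if (!adev)
            return -ENOMEM;
        adev->id = id;
        adev->pool_size = pool_size;

        /* One coherent descriptor pool per engine. */
        adev->dma_desc_pool_virt = dma_alloc_coherent(&ofdev->dev,
                adev->pool_size, &adev->dma_desc_pool, GFP_KERNEL);
        if (!adev->dma_desc_pool_virt) {
            ret = -ENOMEM;
            goto err_free_adev;
        }

        if (adev->id == PPC440SPE_XOR_ID) {
            adev->xor_reg = regs;
            /* Reset the XOR core and select 64-byte address mode. */
            iowrite32be(XOR_CRSR_XASR_BIT, &adev->xor_reg->crsr);
            iowrite32be(XOR_CRSR_64BA_BIT, &adev->xor_reg->crrr);
        } else {
            adev->dma_reg = regs;
            /* Program FIFO size and configuration, clear status
             * (matches 4107-4121). */
        }

        adev->dev = &ofdev->dev;
        adev->common.dev = &ofdev->dev;
        INIT_LIST_HEAD(&adev->common.channels);
        platform_set_drvdata(ofdev, adev);

        /* Single channel per engine; lists, lock and tasklet init elided. */
        chan = kzalloc(sizeof(*chan), GFP_KERNEL);
        if (!chan) {
            ret = -ENOMEM;
            goto err_free_pool;
        }
        chan->device = adev;
        chan->common.device = &adev->common;
        list_add_tail(&chan->common.device_node, &adev->common.channels);

        ret = ppc440spe_adma_setup_irqs(adev, chan, &initcode);
        if (ret)
            goto err_free_chan;

        ppc440spe_adma_init_capabilities(adev);

        ret = dma_async_device_register(&adev->common);
        if (ret) {
            ppc440spe_adma_release_irqs(adev, chan);
            goto err_free_chan;
        }
        return 0;

    err_free_chan:
        kfree(chan);
    err_free_pool:
        dma_free_coherent(adev->dev, adev->pool_size,
                          adev->dma_desc_pool_virt, adev->dma_desc_pool);
    err_free_adev:
        kfree(adev);
        return ret;
    }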
4238 struct ppc440spe_adma_device *adev = platform_get_drvdata(ofdev);
4245 if (adev->id < PPC440SPE_ADMA_ENGINES_NUM)
4246 ppc440spe_adma_devices[adev->id] = -1;
4248 dma_async_device_unregister(&adev->common);
4250 list_for_each_entry_safe(chan, _chan, &adev->common.channels,
4253 ppc440spe_adma_release_irqs(adev, ppc440spe_chan);
4255 if (adev->id != PPC440SPE_XOR_ID) {
4275 dma_free_coherent(adev->dev, adev->pool_size,
4276 adev->dma_desc_pool_virt, adev->dma_desc_pool);
4277 if (adev->id == PPC440SPE_XOR_ID)
4278 iounmap(adev->xor_reg);
4280 iounmap(adev->dma_reg);
4283 kfree(adev);
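The remove-side matches at 4238-4283 show teardown in the reverse order of probe: clear the engine's slot in ppc440spe_adma_devices[], unregister from the dmaengine core, release each channel's IRQs and free the channel, then free the coherent pool, unmap the registers and kfree the adev. A compact reconstruction; the return type, the to_ppc440spe_adma_chan() helper and the per-channel cleanup details are assumptions:

    /* Sketch; relies on <linux/dmaengine.h> and <linux/platform_device.h>. */
    static int ppc440spe_adma_remove(struct platform_device *ofdev)
    {
        struct ppc440spe_adma_device *adev = platform_get_drvdata(ofdev);
        struct dma_chan *chan, *_chan;
        struct ppc440spe_adma_chan *ppc440spe_chan;

        /* Drop this engine from the global bookkeeping table. */
        if (adev->id < PPC440SPE_ADMA_ENGINES_NUM)
            ppc440spe_adma_devices[adev->id] = -1;

        dma_async_device_unregister(&adev->common);

        list_for_each_entry_safe(chan, _chan, &adev->common.channels,
                                 device_node) {
            ppc440spe_chan = to_ppc440spe_adma_chan(chan);
            ppc440spe_adma_release_irqs(adev, ppc440spe_chan);
            /* Per-channel tasklet and descriptor-slot cleanup happens here
             * in the full driver (not visible in the matches). */
            list_del(&chan->device_node);
            kfree(ppc440spe_chan);
        }

        dma_free_coherent(adev->dev, adev->pool_size,
                          adev->dma_desc_pool_virt, adev->dma_desc_pool);
        if (adev->id == PPC440SPE_XOR_ID)
            iounmap(adev->xor_reg);
        else
            iounmap(adev->dma_reg);
        kfree(adev);
        return 0;
    }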