Lines Matching defs:adev
53 [PPC_ADMA_INIT_ALLOC] = "failed to allocate memory for adev "
3767 static void ppc440spe_adma_init_capabilities(struct ppc440spe_adma_device *adev)
3769 switch (adev->id) {
3772 dma_cap_set(DMA_MEMCPY, adev->common.cap_mask);
3773 dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask);
3774 dma_cap_set(DMA_PQ, adev->common.cap_mask);
3775 dma_cap_set(DMA_PQ_VAL, adev->common.cap_mask);
3776 dma_cap_set(DMA_XOR_VAL, adev->common.cap_mask);
3779 dma_cap_set(DMA_XOR, adev->common.cap_mask);
3780 dma_cap_set(DMA_PQ, adev->common.cap_mask);
3781 dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask);
3782 adev->common.cap_mask = adev->common.cap_mask;
3787 adev->common.device_alloc_chan_resources =
3789 adev->common.device_free_chan_resources =
3791 adev->common.device_tx_status = ppc440spe_adma_tx_status;
3792 adev->common.device_issue_pending = ppc440spe_adma_issue_pending;
3795 if (dma_has_cap(DMA_MEMCPY, adev->common.cap_mask)) {
3796 adev->common.device_prep_dma_memcpy =
3799 if (dma_has_cap(DMA_XOR, adev->common.cap_mask)) {
3800 adev->common.max_xor = XOR_MAX_OPS;
3801 adev->common.device_prep_dma_xor =
3804 if (dma_has_cap(DMA_PQ, adev->common.cap_mask)) {
3805 switch (adev->id) {
3807 dma_set_maxpq(&adev->common,
3811 dma_set_maxpq(&adev->common,
3815 adev->common.max_pq = XOR_MAX_OPS * 3;
3818 adev->common.device_prep_dma_pq =
3821 if (dma_has_cap(DMA_PQ_VAL, adev->common.cap_mask)) {
3822 switch (adev->id) {
3824 adev->common.max_pq = DMA0_FIFO_SIZE /
3828 adev->common.max_pq = DMA1_FIFO_SIZE /
3832 adev->common.device_prep_dma_pq_val =
3835 if (dma_has_cap(DMA_XOR_VAL, adev->common.cap_mask)) {
3836 switch (adev->id) {
3838 adev->common.max_xor = DMA0_FIFO_SIZE /
3842 adev->common.max_xor = DMA1_FIFO_SIZE /
3846 adev->common.device_prep_dma_xor_val =
3849 if (dma_has_cap(DMA_INTERRUPT, adev->common.cap_mask)) {
3850 adev->common.device_prep_dma_interrupt =
3855 dev_name(adev->dev),
3856 dma_has_cap(DMA_PQ, adev->common.cap_mask) ? "pq " : "",
3857 dma_has_cap(DMA_PQ_VAL, adev->common.cap_mask) ? "pq_val " : "",
3858 dma_has_cap(DMA_XOR, adev->common.cap_mask) ? "xor " : "",
3859 dma_has_cap(DMA_XOR_VAL, adev->common.cap_mask) ? "xor_val " : "",
3860 dma_has_cap(DMA_MEMCPY, adev->common.cap_mask) ? "memcpy " : "",
3861 dma_has_cap(DMA_INTERRUPT, adev->common.cap_mask) ? "intr " : "");
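The matches above come from ppc440spe_adma_init_capabilities(), which advertises the engine's capabilities to the dmaengine core and wires up the matching prep hooks. Below is a minimal sketch of that pattern; the my_adma_device wrapper, the my_* callbacks and the max_xor limit are assumptions for illustration, not names or values from adma.c.

#include <linux/dmaengine.h>

/* Hypothetical driver state and callbacks; adma.c keys everything off
 * adev->id (DMA0/DMA1/XOR), which is omitted here. */
struct my_adma_device {
	struct dma_device common;
};

extern int my_alloc_chan_resources(struct dma_chan *chan);
extern void my_free_chan_resources(struct dma_chan *chan);
extern void my_issue_pending(struct dma_chan *chan);
extern enum dma_status my_tx_status(struct dma_chan *chan, dma_cookie_t cookie,
				    struct dma_tx_state *txstate);
extern struct dma_async_tx_descriptor *my_prep_dma_xor(struct dma_chan *chan,
		dma_addr_t dst, dma_addr_t *src, unsigned int src_cnt,
		size_t len, unsigned long flags);

static void my_adma_init_capabilities(struct my_adma_device *adev)
{
	/* Advertise what the engine can do ... */
	dma_cap_zero(adev->common.cap_mask);
	dma_cap_set(DMA_XOR, adev->common.cap_mask);
	dma_cap_set(DMA_INTERRUPT, adev->common.cap_mask);

	/* ... install the mandatory channel hooks ... */
	adev->common.device_alloc_chan_resources = my_alloc_chan_resources;
	adev->common.device_free_chan_resources = my_free_chan_resources;
	adev->common.device_tx_status = my_tx_status;
	adev->common.device_issue_pending = my_issue_pending;

	/* ... and the prep hooks, guarded by the capability bits. */
	if (dma_has_cap(DMA_XOR, adev->common.cap_mask)) {
		adev->common.max_xor = 16;	/* placeholder engine limit */
		adev->common.device_prep_dma_xor = my_prep_dma_xor;
	}
}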
3864 static int ppc440spe_adma_setup_irqs(struct ppc440spe_adma_device *adev,
3872 ofdev = container_of(adev->dev, struct platform_device, dev);
3874 if (adev->id != PPC440SPE_XOR_ID) {
3875 adev->err_irq = irq_of_parse_and_map(np, 1);
3876 if (!adev->err_irq) {
3877 dev_warn(adev->dev, "no err irq resource?\n");
3879 adev->err_irq = -ENXIO;
3883 adev->err_irq = -ENXIO;
3886 adev->irq = irq_of_parse_and_map(np, 0);
3887 if (!adev->irq) {
3888 dev_err(adev->dev, "no irq resource\n");
3893 dev_dbg(adev->dev, "irq %d, err irq %d\n",
3894 adev->irq, adev->err_irq);
3896 ret = request_irq(adev->irq, ppc440spe_adma_eot_handler,
3897 0, dev_driver_string(adev->dev), chan);
3899 dev_err(adev->dev, "can't request irq %d\n",
3900 adev->irq);
3909 if (adev->err_irq > 0) {
3911 ret = request_irq(adev->err_irq,
3914 dev_driver_string(adev->dev),
3917 dev_err(adev->dev, "can't request irq %d\n",
3918 adev->err_irq);
3925 if (adev->id == PPC440SPE_XOR_ID) {
3929 &adev->xor_reg->ier);
3940 adev->i2o_reg = of_iomap(np, 0);
3941 if (!adev->i2o_reg) {
3951 enable = (adev->id == PPC440SPE_DMA0_ID) ?
3954 mask = ioread32(&adev->i2o_reg->iopim) & enable;
3955 iowrite32(mask, &adev->i2o_reg->iopim);
3960 free_irq(adev->irq, chan);
3962 irq_dispose_mapping(adev->irq);
3964 if (adev->err_irq > 0) {
3966 irq_dispose_mapping(adev->err_irq);
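ppc440spe_adma_setup_irqs() maps the device-tree interrupts and installs handlers; ppc440spe_adma_release_irqs() (matches below) undoes this by masking the engine's interrupt sources and calling free_irq()/irq_dispose_mapping(). A minimal sketch of the request side, assuming a single interrupt at index 0 and a hypothetical my_adma_eot_handler:

#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/irqdomain.h>
#include <linux/of_irq.h>

/* Hypothetical end-of-transfer handler. */
static irqreturn_t my_adma_eot_handler(int irq, void *data)
{
	/* acknowledge/clear the engine's EOT status here */
	return IRQ_HANDLED;
}

/* Returns the mapped Linux IRQ number on success, negative errno otherwise. */
static int my_adma_setup_irq(struct device *dev, struct device_node *np,
			     void *chan)
{
	unsigned int irq;
	int ret;

	irq = irq_of_parse_and_map(np, 0);	/* returns 0 on failure */
	if (!irq) {
		dev_err(dev, "no irq resource\n");
		return -ENXIO;
	}

	ret = request_irq(irq, my_adma_eot_handler, 0,
			  dev_driver_string(dev), chan);
	if (ret) {
		dev_err(dev, "can't request irq %u\n", irq);
		irq_dispose_mapping(irq);	/* undo the mapping, as the driver's error path does */
		return ret;
	}
	return irq;
}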
3971 static void ppc440spe_adma_release_irqs(struct ppc440spe_adma_device *adev,
3976 if (adev->id == PPC440SPE_XOR_ID) {
3978 mask = ioread32be(&adev->xor_reg->ier);
3981 iowrite32be(mask, &adev->xor_reg->ier);
3984 disable = (adev->id == PPC440SPE_DMA0_ID) ?
3987 mask = ioread32(&adev->i2o_reg->iopim) | disable;
3988 iowrite32(mask, &adev->i2o_reg->iopim);
3990 free_irq(adev->irq, chan);
3991 irq_dispose_mapping(adev->irq);
3992 if (adev->err_irq > 0) {
3993 free_irq(adev->err_irq, chan);
3995 irq_dispose_mapping(adev->err_irq);
3996 iounmap(adev->i2o_reg);
4008 struct ppc440spe_adma_device *adev;
4067 adev = kzalloc(sizeof(*adev), GFP_KERNEL);
4068 if (!adev) {
4074 adev->id = id;
4075 adev->pool_size = pool_size;
4077 adev->dma_desc_pool_virt = dma_alloc_coherent(&ofdev->dev,
4078 adev->pool_size, &adev->dma_desc_pool,
4080 if (adev->dma_desc_pool_virt == NULL) {
4083 adev->pool_size);
4089 adev->dma_desc_pool_virt, (u64)adev->dma_desc_pool);
4098 if (adev->id == PPC440SPE_XOR_ID) {
4099 adev->xor_reg = regs;
4101 iowrite32be(XOR_CRSR_XASR_BIT, &adev->xor_reg->crsr);
4102 iowrite32be(XOR_CRSR_64BA_BIT, &adev->xor_reg->crrr);
4104 size_t fifo_size = (adev->id == PPC440SPE_DMA0_ID) ?
4106 adev->dma_reg = regs;
4113 &adev->dma_reg->fsiz);
4116 &adev->dma_reg->cfg);
4118 iowrite32(~0, &adev->dma_reg->dsts);
4121 adev->dev = &ofdev->dev;
4122 adev->common.dev = &ofdev->dev;
4123 INIT_LIST_HEAD(&adev->common.channels);
4124 platform_set_drvdata(ofdev, adev);
4137 chan->device = adev;
4138 chan->common.device = &adev->common;
4140 list_add_tail(&chan->common.device_node, &adev->common.channels);
4146 if (adev->id != PPC440SPE_XOR_ID) {
4175 ret = ppc440spe_adma_setup_irqs(adev, chan, &initcode);
4179 ppc440spe_adma_init_capabilities(adev);
4181 ret = dma_async_device_register(&adev->common);
4191 ppc440spe_adma_release_irqs(adev, chan);
4200 if (adev->id != PPC440SPE_XOR_ID) {
4211 if (adev->id == PPC440SPE_XOR_ID)
4212 iounmap(adev->xor_reg);
4214 iounmap(adev->dma_reg);
4216 dma_free_coherent(adev->dev, adev->pool_size,
4217 adev->dma_desc_pool_virt,
4218 adev->dma_desc_pool);
4220 kfree(adev);
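These matches trace ppc440spe_adma_probe(): allocate the adev structure, carve out a coherent descriptor pool, map the engine registers, set up channels, IRQs and capabilities, then register with dmaengine, unwinding in reverse order on any failure. A condensed sketch of that ordering, with hypothetical my_* names and a placeholder pool size rather than the driver's per-engine FIFO/pool values:

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/io.h>
#include <linux/of_address.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

struct my_adma_device {
	struct dma_device common;
	void __iomem *regs;
	void *pool_virt;
	dma_addr_t pool_dma;
	size_t pool_size;
};

static int my_adma_probe(struct platform_device *ofdev)
{
	struct device_node *np = ofdev->dev.of_node;
	struct my_adma_device *adev;
	int ret;

	adev = kzalloc(sizeof(*adev), GFP_KERNEL);
	if (!adev)
		return -ENOMEM;

	adev->pool_size = 4096;	/* placeholder; adma.c sizes this per engine */
	adev->pool_virt = dma_alloc_coherent(&ofdev->dev, adev->pool_size,
					     &adev->pool_dma, GFP_KERNEL);
	if (!adev->pool_virt) {
		ret = -ENOMEM;
		goto err_pool;
	}

	adev->regs = of_iomap(np, 0);
	if (!adev->regs) {
		ret = -ENOMEM;
		goto err_iomap;
	}

	adev->common.dev = &ofdev->dev;
	INIT_LIST_HEAD(&adev->common.channels);
	platform_set_drvdata(ofdev, adev);

	/* channel setup, IRQ setup and capability init (see the sketches
	 * above) must all happen before registration */

	ret = dma_async_device_register(&adev->common);
	if (ret)
		goto err_register;
	return 0;

err_register:
	iounmap(adev->regs);
err_iomap:
	dma_free_coherent(&ofdev->dev, adev->pool_size,
			  adev->pool_virt, adev->pool_dma);
err_pool:
	kfree(adev);
	return ret;
}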
4235 struct ppc440spe_adma_device *adev = platform_get_drvdata(ofdev);
4242 if (adev->id < PPC440SPE_ADMA_ENGINES_NUM)
4243 ppc440spe_adma_devices[adev->id] = -1;
4245 dma_async_device_unregister(&adev->common);
4247 list_for_each_entry_safe(chan, _chan, &adev->common.channels,
4250 ppc440spe_adma_release_irqs(adev, ppc440spe_chan);
4252 if (adev->id != PPC440SPE_XOR_ID) {
4272 dma_free_coherent(adev->dev, adev->pool_size,
4273 adev->dma_desc_pool_virt, adev->dma_desc_pool);
4274 if (adev->id == PPC440SPE_XOR_ID)
4275 iounmap(adev->xor_reg);
4277 iounmap(adev->dma_reg);
4280 kfree(adev);
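ppc440spe_adma_remove() reverses probe: unregister from dmaengine first so no new descriptors arrive, release the per-channel IRQs, then free the descriptor pool and register mappings. A matching sketch under the same hypothetical names as the probe sketch above:

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/io.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

/* Same hypothetical state as in the probe sketch. */
struct my_adma_device {
	struct dma_device common;
	void __iomem *regs;
	void *pool_virt;
	dma_addr_t pool_dma;
	size_t pool_size;
};

static int my_adma_remove(struct platform_device *ofdev)
{
	struct my_adma_device *adev = platform_get_drvdata(ofdev);

	dma_async_device_unregister(&adev->common);

	/* per-channel IRQ release (free_irq + irq_dispose_mapping) goes here */

	dma_free_coherent(&ofdev->dev, adev->pool_size,
			  adev->pool_virt, adev->pool_dma);
	iounmap(adev->regs);
	kfree(adev);
	return 0;
}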