Lines matching refs: mdma
256 struct mpc_dma *mdma = dma_chan_to_mpc_dma(&mchan->chan);
300 memcpy_toio(&mdma->tcd[cid], first->tcd, sizeof(struct mpc_dma_tcd));
303 mdma->tcd[cid].e_sg = 1;
305 if (mdma->is_mpc8308) {
307 out_8(&mdma->regs->dmassrt, cid);
310 out_8(&mdma->regs->dmaserq, cid);
313 out_8(&mdma->regs->dmassrt, cid);
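The dma_chan_to_mpc_dma() conversion that opens this block (line 256) also appears at lines 488, 546, 604, 688, 833 and 882: it recovers the controller that owns a given channel. As a hedged sketch of how such a helper is typically written (struct and field names taken from the listing, exact definitions assumed here), it is a pair of container_of() walks from the embedded dma_chan back to the enclosing mpc_dma:

static inline struct mpc_dma_chan *dma_chan_to_mpc_dma_chan(struct dma_chan *c)
{
        /* Assumes struct mpc_dma_chan embeds its struct dma_chan as ->chan. */
        return container_of(c, struct mpc_dma_chan, chan);
}

static inline struct mpc_dma *dma_chan_to_mpc_dma(struct dma_chan *c)
{
        struct mpc_dma_chan *mchan = dma_chan_to_mpc_dma_chan(c);

        /* channels[] is assumed to be an array embedded in struct mpc_dma
         * (see the mdma->channels[...] uses at lines 327, 387 and 998), so
         * indexing it by chan_id lands back on the owning controller. */
        return container_of(mchan, struct mpc_dma, channels[c->chan_id]);
}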
318 static void mpc_dma_irq_process(struct mpc_dma *mdma, u32 is, u32 es, int off)
327 mchan = &mdma->channels[ch + off];
331 out_8(&mdma->regs->dmacint, ch + off);
332 out_8(&mdma->regs->dmacerr, ch + off);
351 struct mpc_dma *mdma = data;
355 es = in_be32(&mdma->regs->dmaes);
356 spin_lock(&mdma->error_status_lock);
357 if ((es & MPC_DMA_DMAES_VLD) && mdma->error_status == 0)
358 mdma->error_status = es;
359 spin_unlock(&mdma->error_status_lock);
362 if (mdma->dma.chancnt > 32) {
363 mpc_dma_irq_process(mdma, in_be32(&mdma->regs->dmainth),
364 in_be32(&mdma->regs->dmaerrh), 32);
366 mpc_dma_irq_process(mdma, in_be32(&mdma->regs->dmaintl),
367 in_be32(&mdma->regs->dmaerrl), 0);
370 tasklet_schedule(&mdma->tasklet);
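Lines 351-370 are the hard-IRQ half of the error handling: the handler latches the first valid DMAES snapshot under error_status_lock, acknowledges per-channel interrupt and error bits through mpc_dma_irq_process() (lines 331-332), and defers everything else to the tasklet. Assembled into one place, with the non-matching lines in between (local declarations, return value, comments) filled in as a sketch:

static irqreturn_t mpc_dma_irq(int irq, void *data)
{
        struct mpc_dma *mdma = data;
        u32 es;

        /* Latch the first valid error status for the tasklet to decode. */
        es = in_be32(&mdma->regs->dmaes);
        spin_lock(&mdma->error_status_lock);
        if ((es & MPC_DMA_DMAES_VLD) && mdma->error_status == 0)
                mdma->error_status = es;
        spin_unlock(&mdma->error_status_lock);

        /* Handle the upper 32 channels only on controllers that have them. */
        if (mdma->dma.chancnt > 32) {
                mpc_dma_irq_process(mdma, in_be32(&mdma->regs->dmainth),
                                        in_be32(&mdma->regs->dmaerrh), 32);
        }
        mpc_dma_irq_process(mdma, in_be32(&mdma->regs->dmaintl),
                                in_be32(&mdma->regs->dmaerrl), 0);

        /* Defer completion processing and error reporting to the tasklet. */
        tasklet_schedule(&mdma->tasklet);

        return IRQ_HANDLED;
}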
376 static void mpc_dma_process_completed(struct mpc_dma *mdma)
386 for (i = 0; i < mdma->dma.chancnt; i++) {
387 mchan = &mdma->channels[i];
419 struct mpc_dma *mdma = from_tasklet(mdma, t, tasklet);
423 spin_lock_irqsave(&mdma->error_status_lock, flags);
424 es = mdma->error_status;
425 mdma->error_status = 0;
426 spin_unlock_irqrestore(&mdma->error_status_lock, flags);
430 dev_err(mdma->dma.dev,
435 dev_err(mdma->dma.dev, "- Group Priority Error\n");
437 dev_err(mdma->dma.dev, "- Channel Priority Error\n");
439 dev_err(mdma->dma.dev, "- Source Address Error\n");
441 dev_err(mdma->dma.dev, "- Source Offset Configuration Error\n");
443 dev_err(mdma->dma.dev, "- Destination Address Error\n");
445 dev_err(mdma->dma.dev, "- Destination Offset Configuration Error\n");
447 dev_err(mdma->dma.dev, "- NBytes/Citter Configuration Error\n");
449 dev_err(mdma->dma.dev, "- Scatter/Gather Configuration Error\n");
451 dev_err(mdma->dma.dev, "- Source Bus Error\n");
453 dev_err(mdma->dma.dev, "- Destination Bus Error\n");
456 mpc_dma_process_completed(mdma);
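Lines 419-456 are the tasklet side of the same latch: it drains error_status under the spinlock, decodes the individual DMAES bits through the dev_err() calls listed at lines 430-453, and then retires finished descriptors. A compact sketch of that flow, with the per-bit reporting elided:

static void mpc_dma_tasklet(struct tasklet_struct *t)
{
        struct mpc_dma *mdma = from_tasklet(mdma, t, tasklet);
        unsigned long flags;
        u32 es;

        /* Consume the error status latched by mpc_dma_irq(). */
        spin_lock_irqsave(&mdma->error_status_lock, flags);
        es = mdma->error_status;
        mdma->error_status = 0;
        spin_unlock_irqrestore(&mdma->error_status_lock, flags);

        /* Per-bit dev_err() reporting (priority, address/offset
         * configuration, scatter/gather and bus errors) as listed at
         * lines 430-453 above. */

        /* Hand completed descriptors back to their submitters. */
        mpc_dma_process_completed(mdma);
}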
488 struct mpc_dma *mdma = dma_chan_to_mpc_dma(chan);
498 tcd = dma_alloc_coherent(mdma->dma.dev,
508 dev_notice(mdma->dma.dev,
525 dma_free_coherent(mdma->dma.dev,
538 out_8(&mdma->regs->dmaseei, chan->chan_id);
546 struct mpc_dma *mdma = dma_chan_to_mpc_dma(chan);
570 dma_free_coherent(mdma->dma.dev,
579 out_8(&mdma->regs->dmaceei, chan->chan_id);
604 struct mpc_dma *mdma = dma_chan_to_mpc_dma(chan);
621 mpc_dma_process_completed(mdma);
637 } else if (!mdma->is_mpc8308 && IS_ALIGNED(src | dst | len, 16)) {
688 struct mpc_dma *mdma = dma_chan_to_mpc_dma(chan);
714 mpc_dma_process_completed(mdma);
764 if (mdma->is_mpc8308) {
833 struct mpc_dma *mdma = dma_chan_to_mpc_dma(&mchan->chan);
855 if (!is_buswidth_valid(cfg->src_addr_width, mdma->is_mpc8308) ||
856 !is_buswidth_valid(cfg->dst_addr_width, mdma->is_mpc8308))
882 struct mpc_dma *mdma = dma_chan_to_mpc_dma(chan);
888 out_8(&mdma->regs->dmacerq, chan->chan_id);
903 struct mpc_dma *mdma;
910 mdma = devm_kzalloc(dev, sizeof(struct mpc_dma), GFP_KERNEL);
911 if (!mdma) {
916 mdma->irq = irq_of_parse_and_map(dn, 0);
917 if (!mdma->irq) {
924 mdma->is_mpc8308 = 1;
925 mdma->irq2 = irq_of_parse_and_map(dn, 1);
926 if (!mdma->irq2) {
948 mdma->regs = devm_ioremap(dev, regs_start, regs_size);
949 if (!mdma->regs) {
955 mdma->tcd = (struct mpc_dma_tcd *)((u8 *)(mdma->regs)
958 retval = request_irq(mdma->irq, &mpc_dma_irq, 0, DRV_NAME, mdma);
965 if (mdma->is_mpc8308) {
966 retval = request_irq(mdma->irq2, &mpc_dma_irq, 0,
967 DRV_NAME, mdma);
975 spin_lock_init(&mdma->error_status_lock);
977 dma = &mdma->dma;
992 if (mdma->is_mpc8308)
998 mchan = &mdma->channels[i];
1013 tasklet_setup(&mdma->tasklet, mpc_dma_tasklet);
1021 if (mdma->is_mpc8308) {
1023 out_be32(&mdma->regs->dmacr, MPC_DMA_DMACR_ERCA);
1026 out_be32(&mdma->regs->dmagpor, MPC_DMA_DMAGPOR_SNOOP_ENABLE);
1028 out_be32(&mdma->regs->dmaeeil, 0);
1031 out_be32(&mdma->regs->dmaintl, 0xFFFF);
1032 out_be32(&mdma->regs->dmaerrl, 0xFFFF);
1034 out_be32(&mdma->regs->dmacr, MPC_DMA_DMACR_EDCG |
1039 out_be32(&mdma->regs->dmaerqh, 0);
1040 out_be32(&mdma->regs->dmaerql, 0);
1043 out_be32(&mdma->regs->dmaeeih, 0);
1044 out_be32(&mdma->regs->dmaeeil, 0);
1047 out_be32(&mdma->regs->dmainth, 0xFFFFFFFF);
1048 out_be32(&mdma->regs->dmaintl, 0xFFFFFFFF);
1049 out_be32(&mdma->regs->dmaerrh, 0xFFFFFFFF);
1050 out_be32(&mdma->regs->dmaerrl, 0xFFFFFFFF);
1053 out_be32(&mdma->regs->dmaihsa, 0);
1054 out_be32(&mdma->regs->dmailsa, 0);
1058 dev_set_drvdata(dev, mdma);
1066 of_dma_xlate_by_chan_id, mdma);
1074 if (mdma->is_mpc8308)
1075 free_irq(mdma->irq2, mdma);
1077 free_irq(mdma->irq, mdma);
1079 if (mdma->is_mpc8308)
1080 irq_dispose_mapping(mdma->irq2);
1082 irq_dispose_mapping(mdma->irq);
1090 struct mpc_dma *mdma = dev_get_drvdata(dev);
1094 dma_async_device_unregister(&mdma->dma);
1095 if (mdma->is_mpc8308) {
1096 free_irq(mdma->irq2, mdma);
1097 irq_dispose_mapping(mdma->irq2);
1099 free_irq(mdma->irq, mdma);
1100 irq_dispose_mapping(mdma->irq);
1101 tasklet_kill(&mdma->tasklet);