Lines Matching refs:mdma

256 struct mpc_dma *mdma = dma_chan_to_mpc_dma(&mchan->chan);
300 memcpy_toio(&mdma->tcd[cid], first->tcd, sizeof(struct mpc_dma_tcd));
303 mdma->tcd[cid].e_sg = 1;
305 if (mdma->is_mpc8308) {
307 out_8(&mdma->regs->dmassrt, cid);
310 out_8(&mdma->regs->dmaserq, cid);
313 out_8(&mdma->regs->dmassrt, cid);
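The matches at source lines 256-313 come from the channel-start path: the first queued descriptor's TCD is copied into the channel's hardware TCD slot (line 303 also sets e_sg when further descriptors are chained), and the transfer is started either by software (dmassrt) or by enabling the channel's hardware request line (dmaserq). Below is a condensed sketch reconstructed from those matched lines only; the helper name, the descriptor type name and the use_request_line flag are illustrative stand-ins for context that is not visible in this listing.

/* Illustrative sketch of the start sequence implied by lines 300-313. */
static void start_channel_sketch(struct mpc_dma *mdma, struct mpc_dma_desc *first,
				 int cid, bool use_request_line)
{
	/* Load the first descriptor's TCD into the channel's hardware slot */
	memcpy_toio(&mdma->tcd[cid], first->tcd, sizeof(struct mpc_dma_tcd));

	if (mdma->is_mpc8308) {
		/* MPC8308: software-initiated start */
		out_8(&mdma->regs->dmassrt, cid);
	} else if (use_request_line) {
		/* Peripheral involved: enable the channel's request line */
		out_8(&mdma->regs->dmaserq, cid);
	} else {
		/* Memory-to-memory transfer: software-initiated start */
		out_8(&mdma->regs->dmassrt, cid);
	}
}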
318 static void mpc_dma_irq_process(struct mpc_dma *mdma, u32 is, u32 es, int off)
327 mchan = &mdma->channels[ch + off];
331 out_8(&mdma->regs->dmacint, ch + off);
332 out_8(&mdma->regs->dmacerr, ch + off);
351 struct mpc_dma *mdma = data;
355 es = in_be32(&mdma->regs->dmaes);
356 spin_lock(&mdma->error_status_lock);
357 if ((es & MPC_DMA_DMAES_VLD) && mdma->error_status == 0)
358 mdma->error_status = es;
359 spin_unlock(&mdma->error_status_lock);
362 if (mdma->dma.chancnt > 32) {
363 mpc_dma_irq_process(mdma, in_be32(&mdma->regs->dmainth),
364 in_be32(&mdma->regs->dmaerrh), 32);
366 mpc_dma_irq_process(mdma, in_be32(&mdma->regs->dmaintl),
367 in_be32(&mdma->regs->dmaerrl), 0);
370 tasklet_schedule(&mdma->tasklet);
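Lines 351-370 are the top-half interrupt handler: it latches the first reported error status under error_status_lock, acknowledges the per-channel interrupt and error bits (the dmacint/dmacerr writes matched at lines 331-332 happen inside mpc_dma_irq_process()), and defers completion handling to the tasklet. A sketch reconstructed from the matched lines; the function name and return value are assumptions.

static irqreturn_t mpc_dma_irq_sketch(int irq, void *data)
{
	struct mpc_dma *mdma = data;
	u32 es = in_be32(&mdma->regs->dmaes);

	/* Remember only the first error status until the tasklet consumes it */
	spin_lock(&mdma->error_status_lock);
	if ((es & MPC_DMA_DMAES_VLD) && mdma->error_status == 0)
		mdma->error_status = es;
	spin_unlock(&mdma->error_status_lock);

	/* Controllers with more than 32 channels have a "high" register set */
	if (mdma->dma.chancnt > 32)
		mpc_dma_irq_process(mdma, in_be32(&mdma->regs->dmainth),
				    in_be32(&mdma->regs->dmaerrh), 32);
	mpc_dma_irq_process(mdma, in_be32(&mdma->regs->dmaintl),
			    in_be32(&mdma->regs->dmaerrl), 0);

	/* Heavier work (descriptor completion, error reporting) runs later */
	tasklet_schedule(&mdma->tasklet);

	return IRQ_HANDLED;
}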
376 static void mpc_dma_process_completed(struct mpc_dma *mdma)
386 for (i = 0; i < mdma->dma.chancnt; i++) {
387 mchan = &mdma->channels[i];
419 struct mpc_dma *mdma = from_tasklet(mdma, t, tasklet);
423 spin_lock_irqsave(&mdma->error_status_lock, flags);
424 es = mdma->error_status;
425 mdma->error_status = 0;
426 spin_unlock_irqrestore(&mdma->error_status_lock, flags);
430 dev_err(mdma->dma.dev,
435 dev_err(mdma->dma.dev, "- Group Priority Error\n");
437 dev_err(mdma->dma.dev, "- Channel Priority Error\n");
439 dev_err(mdma->dma.dev, "- Source Address Error\n");
441 dev_err(mdma->dma.dev, "- Source Offset Configuration Error\n");
443 dev_err(mdma->dma.dev, "- Destination Address Error\n");
445 dev_err(mdma->dma.dev, "- Destination Offset Configuration Error\n");
447 dev_err(mdma->dma.dev, "- NBytes/Citter Configuration Error\n");
449 dev_err(mdma->dma.dev, "- Scatter/Gather Configuration Error\n");
451 dev_err(mdma->dma.dev, "- Source Bus Error\n");
453 dev_err(mdma->dma.dev, "- Destination Bus Error\n");
456 mpc_dma_process_completed(mdma);
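The tasklet (lines 419-456) takes a snapshot of the latched error status under the same spinlock, clears it, decodes it bit by bit into the dev_err() messages listed above, and finally reaps finished descriptors via mpc_dma_process_completed(). A minimal sketch of the snapshot-and-decode pattern; the MPC_DMA_DMAES_SAE name is assumed from the driver's MPC_DMA_DMAES_* constants.

unsigned long flags;
u32 es;

/* Consume the error status latched by the interrupt handler */
spin_lock_irqsave(&mdma->error_status_lock, flags);
es = mdma->error_status;
mdma->error_status = 0;
spin_unlock_irqrestore(&mdma->error_status_lock, flags);

/* One dev_err() per DMAES error bit, as in lines 430-453 */
if (es & MPC_DMA_DMAES_SAE)
	dev_err(mdma->dma.dev, "- Source Address Error\n");
/* ... remaining error bits are decoded the same way ... */

mpc_dma_process_completed(mdma);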
488 struct mpc_dma *mdma = dma_chan_to_mpc_dma(chan);
498 tcd = dma_alloc_coherent(mdma->dma.dev,
508 dev_notice(mdma->dma.dev,
525 dma_free_coherent(mdma->dma.dev,
538 out_8(&mdma->regs->dmaseei, chan->chan_id);
546 struct mpc_dma *mdma = dma_chan_to_mpc_dma(chan);
570 dma_free_coherent(mdma->dma.dev,
579 out_8(&mdma->regs->dmaceei, chan->chan_id);
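Lines 488-579 cover per-channel resource management: alloc_chan_resources allocates a coherent pool of TCDs and enables the channel's error interrupt (dmaseei), while free_chan_resources releases the pool and disables it again (dmaceei). A sketch of the allocation half; MPC_DMA_DESCRIPTORS is assumed to be the driver's pool-size constant.

/* Allocate one hardware-visible TCD per software descriptor */
tcd = dma_alloc_coherent(mdma->dma.dev,
			 MPC_DMA_DESCRIPTORS * sizeof(struct mpc_dma_tcd),
			 &tcd_paddr, GFP_KERNEL);
if (!tcd)
	return -ENOMEM;

/* ... build the descriptor free list (not part of the matches) ... */

/* Let this channel raise error interrupts */
out_8(&mdma->regs->dmaseei, chan->chan_id);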
604 struct mpc_dma *mdma = dma_chan_to_mpc_dma(chan);
621 mpc_dma_process_completed(mdma);
637 } else if (!mdma->is_mpc8308 && IS_ALIGNED(src | dst | len, 16)) {
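Line 637 is from the memcpy preparation path: the transfer width is chosen from the combined alignment of source, destination and length, and the 16-byte option is skipped on MPC8308, which does not support it. A sketch of that selection; the MPC_DMA_TSIZE_* values and the tcd field names are assumed from the driver's TCD layout.

if (IS_ALIGNED(src | dst | len, 32)) {
	/* widest access the engine supports */
	tcd->ssize = MPC_DMA_TSIZE_32;
	tcd->dsize = MPC_DMA_TSIZE_32;
} else if (!mdma->is_mpc8308 && IS_ALIGNED(src | dst | len, 16)) {
	/* MPC8308 cannot do 16-byte accesses */
	tcd->ssize = MPC_DMA_TSIZE_16;
	tcd->dsize = MPC_DMA_TSIZE_16;
}
/* ... narrower fallbacks (4-, 2-, 1-byte) follow the same pattern ... */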
688 struct mpc_dma *mdma = dma_chan_to_mpc_dma(chan);
714 mpc_dma_process_completed(mdma);
764 if (mdma->is_mpc8308) {
832 struct mpc_dma *mdma = dma_chan_to_mpc_dma(&mchan->chan);
854 if (!is_buswidth_valid(cfg->src_addr_width, mdma->is_mpc8308) ||
855 !is_buswidth_valid(cfg->dst_addr_width, mdma->is_mpc8308))
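Lines 832-855 are from the slave-config path: both the source and destination bus widths must be acceptable for the detected controller variant before the configuration is stored. A sketch of the check, taken almost verbatim from the matched lines; the -EINVAL return is an assumption.

/* Reject bus widths the hardware (or the MPC8308 variant) cannot handle */
if (!is_buswidth_valid(cfg->src_addr_width, mdma->is_mpc8308) ||
    !is_buswidth_valid(cfg->dst_addr_width, mdma->is_mpc8308))
	return -EINVAL;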
881 struct mpc_dma *mdma = dma_chan_to_mpc_dma(chan);
887 out_8(&mdma->regs->dmacerq, chan->chan_id);
902 struct mpc_dma *mdma;
909 mdma = devm_kzalloc(dev, sizeof(struct mpc_dma), GFP_KERNEL);
910 if (!mdma) {
915 mdma->irq = irq_of_parse_and_map(dn, 0);
916 if (!mdma->irq) {
923 mdma->is_mpc8308 = 1;
924 mdma->irq2 = irq_of_parse_and_map(dn, 1);
925 if (!mdma->irq2) {
947 mdma->regs = devm_ioremap(dev, regs_start, regs_size);
948 if (!mdma->regs) {
954 mdma->tcd = (struct mpc_dma_tcd *)((u8 *)(mdma->regs)
957 retval = request_irq(mdma->irq, &mpc_dma_irq, 0, DRV_NAME, mdma);
964 if (mdma->is_mpc8308) {
965 retval = request_irq(mdma->irq2, &mpc_dma_irq, 0,
966 DRV_NAME, mdma);
974 spin_lock_init(&mdma->error_status_lock);
976 dma = &mdma->dma;
991 if (mdma->is_mpc8308)
997 mchan = &mdma->channels[i];
1012 tasklet_setup(&mdma->tasklet, mpc_dma_tasklet);
1020 if (mdma->is_mpc8308) {
1022 out_be32(&mdma->regs->dmacr, MPC_DMA_DMACR_ERCA);
1025 out_be32(&mdma->regs->dmagpor, MPC_DMA_DMAGPOR_SNOOP_ENABLE);
1027 out_be32(&mdma->regs->dmaeeil, 0);
1030 out_be32(&mdma->regs->dmaintl, 0xFFFF);
1031 out_be32(&mdma->regs->dmaerrl, 0xFFFF);
1033 out_be32(&mdma->regs->dmacr, MPC_DMA_DMACR_EDCG |
1038 out_be32(&mdma->regs->dmaerqh, 0);
1039 out_be32(&mdma->regs->dmaerql, 0);
1042 out_be32(&mdma->regs->dmaeeih, 0);
1043 out_be32(&mdma->regs->dmaeeil, 0);
1046 out_be32(&mdma->regs->dmainth, 0xFFFFFFFF);
1047 out_be32(&mdma->regs->dmaintl, 0xFFFFFFFF);
1048 out_be32(&mdma->regs->dmaerrh, 0xFFFFFFFF);
1049 out_be32(&mdma->regs->dmaerrl, 0xFFFFFFFF);
1052 out_be32(&mdma->regs->dmaihsa, 0);
1053 out_be32(&mdma->regs->dmailsa, 0);
1057 dev_set_drvdata(dev, mdma);
1065 of_dma_xlate_by_chan_id, mdma);
1073 if (mdma->is_mpc8308)
1074 free_irq(mdma->irq2, mdma);
1076 free_irq(mdma->irq, mdma);
1078 if (mdma->is_mpc8308)
1079 irq_dispose_mapping(mdma->irq2);
1081 irq_dispose_mapping(mdma->irq);
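The probe matches (lines 902-1081) follow the usual shape: allocate the mpc_dma context, map the interrupt(s) and registers, request the IRQ(s), initialize the controller (a single dmacr write plus snoop enable on MPC8308, a fuller reset of the request/interrupt/error registers otherwise), register the DMA device, and unwind the IRQs on failure. A compressed sketch of the error unwinding implied by lines 1073-1081; the goto labels are illustrative, not the driver's own.

err_free:
	if (mdma->is_mpc8308)
		free_irq(mdma->irq2, mdma);
	free_irq(mdma->irq, mdma);

err_dispose:
	if (mdma->is_mpc8308)
		irq_dispose_mapping(mdma->irq2);
	irq_dispose_mapping(mdma->irq);

	return retval;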
1089 struct mpc_dma *mdma = dev_get_drvdata(dev);
1093 dma_async_device_unregister(&mdma->dma);
1094 if (mdma->is_mpc8308) {
1095 free_irq(mdma->irq2, mdma);
1096 irq_dispose_mapping(mdma->irq2);
1098 free_irq(mdma->irq, mdma);
1099 irq_dispose_mapping(mdma->irq);
1100 tasklet_kill(&mdma->tasklet);
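Finally, lines 1089-1100 are the remove path, which tears things down in roughly the reverse order of probe: unregister the DMA device, free and dispose both interrupts (the second one only on MPC8308), and kill the tasklet. A sketch assembled from the matched lines; the function name and signature are assumptions.

static void mpc_dma_remove_sketch(struct device *dev)
{
	struct mpc_dma *mdma = dev_get_drvdata(dev);

	dma_async_device_unregister(&mdma->dma);
	if (mdma->is_mpc8308) {
		free_irq(mdma->irq2, mdma);
		irq_dispose_mapping(mdma->irq2);
	}
	free_irq(mdma->irq, mdma);
	irq_dispose_mapping(mdma->irq);
	tasklet_kill(&mdma->tasklet);
}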