Lines matching refs: sdma (CSR SiRFSoC DMA driver, drivers/dma/sirf-dma.c in the Linux kernel)

238 	struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
247 base = sdma->base;
253 if (sdma->type == SIRFSOC_DMA_VER_A7V2)
257 sdma->exec_desc(sdesc, cid, schan->mode, base);
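
Lines 238-257 are the descriptor-execution path: the parent sirfsoc_dma is recovered from the channel, the A7V2 variant is special-cased, and the actual register programming goes through the per-variant exec_desc hook stored at probe time (line 859). A minimal sketch of that dispatch follows; it and all later sketches assume normal kernel driver context (<linux/dmaengine.h>, <linux/io.h>, etc.) and use only names visible in the listing, with anything else flagged as an assumption. Here the hook signature, the queue field name, and the cid source are assumptions:

	struct sirfsoc_dma {
		void __iomem *base;
		int type;				/* SIRFSOC_DMA_VER_* */
		void (*exec_desc)(struct sirfsoc_dma_desc *sdesc, int cid,
				  int mode, void __iomem *base);
		/* ... */
	};

	static void sirfsoc_dma_execute(struct sirfsoc_dma_chan *schan)
	{
		struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
		void __iomem *base = sdma->base;
		int cid = schan->chan.chan_id;		/* cid source assumed */
		struct sirfsoc_dma_desc *sdesc =	/* queue name assumed */
			list_first_entry(&schan->queued,
					 struct sirfsoc_dma_desc, node);

		if (sdma->type == SIRFSOC_DMA_VER_A7V2)
			cid = 0;	/* single-channel variant (assumption) */

		/* One indirect call instead of a per-variant switch here. */
		sdma->exec_desc(sdesc, cid, schan->mode, base);
	}
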
266 struct sirfsoc_dma *sdma = data;
274 switch (sdma->type) {
277 is = readl(sdma->base + SIRFSOC_DMA_CH_INT);
278 reg = sdma->base + SIRFSOC_DMA_CH_INT;
282 schan = &sdma->channels[ch];
300 is = readl(sdma->base + SIRFSOC_DMA_INT_ATLAS7);
302 reg = sdma->base + SIRFSOC_DMA_INT_ATLAS7;
304 schan = &sdma->channels[0];
332 tasklet_schedule(&sdma->tasklet);
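
The interrupt handler (lines 266-332) reads the per-variant status register, acks what it saw, and defers the completion bookkeeping to a tasklet. A condensed single-variant sketch modeled on lines 277-282; the write-1-to-clear ack and the __ffs() scan are assumptions:

	static irqreturn_t sirfsoc_dma_irq(int irq, void *data)
	{
		struct sirfsoc_dma *sdma = data;
		void __iomem *reg = sdma->base + SIRFSOC_DMA_CH_INT;
		u32 is = readl(reg);

		while (is) {
			int ch = __ffs(is);		/* lowest pending channel */

			is &= ~(1 << ch);
			writel_relaxed(1 << ch, reg);	/* ack (W1C assumed) */
			/* ... mark &sdma->channels[ch]'s active descriptor
			 *     complete ... */
		}

		tasklet_schedule(&sdma->tasklet);	/* heavy lifting deferred */
		return IRQ_HANDLED;
	}
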
338 static void sirfsoc_dma_process_completed(struct sirfsoc_dma *sdma)
349 for (i = 0; i < sdma->dma.chancnt; i++) {
350 schan = &sdma->channels[i];
398 struct sirfsoc_dma *sdma = from_tasklet(sdma, t, tasklet);
400 sirfsoc_dma_process_completed(sdma);
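
Lines 338-400 are the bottom half: the IRQ handler only schedules, and sirfsoc_dma_process_completed() walks all sdma->dma.chancnt channels from tasklet context. The from_tasklet()/tasklet_setup() pairing on lines 398 and 941 is the post-v5.9 idiom, shown here essentially as the listing has it:

	static void sirfsoc_dma_tasklet(struct tasklet_struct *t)
	{
		/* from_tasklet() is container_of(): recover the sirfsoc_dma
		 * that embeds this tasklet_struct as its .tasklet member. */
		struct sirfsoc_dma *sdma = from_tasklet(sdma, t, tasklet);

		sirfsoc_dma_process_completed(sdma);
	}

	/* Registered at probe time (line 941):
	 *	tasklet_setup(&sdma->tasklet, sirfsoc_dma_tasklet);
	 */
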
445 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
451 switch (sdma->type) {
453 writel_relaxed(1 << cid, sdma->base + SIRFSOC_DMA_INT_EN_CLR);
454 writel_relaxed(1 << cid, sdma->base + SIRFSOC_DMA_CH_INT);
456 sdma->base +
458 writel_relaxed(1 << cid, sdma->base + SIRFSOC_DMA_CH_VALID);
461 writel_relaxed(0, sdma->base + SIRFSOC_DMA_INT_EN_ATLAS7);
463 sdma->base + SIRFSOC_DMA_INT_ATLAS7);
464 writel_relaxed(0, sdma->base + SIRFSOC_DMA_LOOP_CTRL_ATLAS7);
465 writel_relaxed(0, sdma->base + SIRFSOC_DMA_VALID_ATLAS7);
468 writel_relaxed(readl_relaxed(sdma->base + SIRFSOC_DMA_INT_EN) &
469 ~(1 << cid), sdma->base + SIRFSOC_DMA_INT_EN);
470 writel_relaxed(readl_relaxed(sdma->base +
473 sdma->base + SIRFSOC_DMA_CH_LOOP_CTRL);
474 writel_relaxed(1 << cid, sdma->base + SIRFSOC_DMA_CH_VALID);
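
terminate_all (lines 445-474) shows all three register-banking styles side by side: A7V1 has dedicated set/clear registers, A7V2 is a single-channel block with global enables, and the legacy parts need read-modify-write on shared registers. A sketch assembled from the matched lines; the enum labels for the non-A7V2 cases are assumptions:

	static int sirfsoc_dma_terminate_all(struct dma_chan *chan)
	{
		struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan);
		struct sirfsoc_dma_chan *schan =
			container_of(chan, struct sirfsoc_dma_chan, chan);
		int cid = schan->chan.chan_id;

		switch (sdma->type) {
		case SIRFSOC_DMA_VER_A7V1:	/* enum label assumed */
			/* Dedicated clear registers: no RMW race. */
			writel_relaxed(1 << cid, sdma->base + SIRFSOC_DMA_INT_EN_CLR);
			writel_relaxed(1 << cid, sdma->base + SIRFSOC_DMA_CH_INT);
			/* ... loop-control clear (continuation lines 456-457
			 *     not matched by this listing) ... */
			writel_relaxed(1 << cid, sdma->base + SIRFSOC_DMA_CH_VALID);
			break;
		case SIRFSOC_DMA_VER_A7V2:
			/* Single-channel block: global disables, no bitmask. */
			writel_relaxed(0, sdma->base + SIRFSOC_DMA_INT_EN_ATLAS7);
			writel_relaxed(0, sdma->base + SIRFSOC_DMA_LOOP_CTRL_ATLAS7);
			writel_relaxed(0, sdma->base + SIRFSOC_DMA_VALID_ATLAS7);
			break;
		default:
			/* Legacy parts share one enable register: RMW. */
			writel_relaxed(readl_relaxed(sdma->base + SIRFSOC_DMA_INT_EN) &
				       ~(1 << cid), sdma->base + SIRFSOC_DMA_INT_EN);
			writel_relaxed(1 << cid, sdma->base + SIRFSOC_DMA_CH_VALID);
			break;
		}
		return 0;
	}
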
491 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
497 switch (sdma->type) {
500 sdma->base +
504 writel_relaxed(0, sdma->base + SIRFSOC_DMA_LOOP_CTRL_ATLAS7);
507 writel_relaxed(readl_relaxed(sdma->base +
510 sdma->base + SIRFSOC_DMA_CH_LOOP_CTRL);
525 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
530 switch (sdma->type) {
533 sdma->base + SIRFSOC_DMA_CH_LOOP_CTRL_ATLAS7);
537 sdma->base + SIRFSOC_DMA_LOOP_CTRL_ATLAS7);
540 writel_relaxed(readl_relaxed(sdma->base +
543 sdma->base + SIRFSOC_DMA_CH_LOOP_CTRL);
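
pause (lines 491-510) and resume (lines 525-543) are mirror images: both switch on sdma->type and either clear or set the channel's bits in the loop-control register. A sketch of the legacy branch only; the exact bit layout inside SIRFSOC_DMA_CH_LOOP_CTRL is not visible in the matched lines and is assumed here to be one bit per channel:

	static void sirfsoc_dma_pause_chan(struct sirfsoc_dma_chan *schan)
	{
		struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(&schan->chan);
		int cid = schan->chan.chan_id;
		u32 val = readl_relaxed(sdma->base + SIRFSOC_DMA_CH_LOOP_CTRL);

		/* Clear to pause; the resume path sets the same bit back. */
		writel_relaxed(val & ~(1 << cid),
			       sdma->base + SIRFSOC_DMA_CH_LOOP_CTRL);
	}
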
558 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan);
565 pm_runtime_get_sync(sdma->dma.dev);
571 dev_notice(sdma->dma.dev, "Memory allocation error. "
599 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan);
621 pm_runtime_put(sdma->dma.dev);
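
alloc_chan_resources (lines 558-571) and free_chan_resources (lines 599-621) bracket channel use with runtime PM, so the controller's clock is only guaranteed on while a client actually holds a channel. The pairing, with the descriptor-pool management elided:

	static int sirfsoc_dma_alloc_chan_resources(struct dma_chan *chan)
	{
		struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan);

		pm_runtime_get_sync(sdma->dma.dev);	/* power up before use */
		/* ... allocate the descriptor pool; on failure, dev_notice()
		 *     as on line 571 and drop the reference again ... */
		return 1;	/* dmaengine expects the descriptor count */
	}

	static void sirfsoc_dma_free_chan_resources(struct dma_chan *chan)
	{
		struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan);

		/* ... free descriptors ... */
		pm_runtime_put(sdma->dma.dev);		/* balances the get above */
	}
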
643 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan);
670 if (sdma->type == SIRFSOC_DMA_VER_A7V2)
673 if (sdma->type == SIRFSOC_DMA_VER_A7V2) {
674 dma_pos = readl_relaxed(sdma->base + SIRFSOC_DMA_CUR_DATA_ADDR);
677 sdma->base + cid * 0x10 + SIRFSOC_DMA_CH_ADDR) << 2;
692 struct sirfsoc_dma *sdma = dma_chan_to_sirfsoc_dma(chan);
714 sirfsoc_dma_process_completed(sdma);
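
tx_status (lines 643-714) computes residue by reading back the hardware's transfer pointer. The unit difference on line 677 is the interesting part: A7V2's SIRFSOC_DMA_CUR_DATA_ADDR is a byte address, while older parts report a 32-bit-word offset in the per-channel SIRFSOC_DMA_CH_ADDR register, hence the << 2. A sketch of just that readback; the residue arithmetic around it uses descriptor fields that are assumptions:

	static dma_addr_t sirfsoc_dma_cur_pos(struct sirfsoc_dma *sdma, int cid)
	{
		if (sdma->type == SIRFSOC_DMA_VER_A7V2)
			return readl_relaxed(sdma->base + SIRFSOC_DMA_CUR_DATA_ADDR);

		/* Legacy parts count in 32-bit words; convert to bytes. */
		return readl_relaxed(sdma->base + cid * 0x10 +
				     SIRFSOC_DMA_CH_ADDR) << 2;
	}

	/* residue = sdesc->addr + total_bytes - sirfsoc_dma_cur_pos(sdma, cid);
	 * (field names assumed) */
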
830 struct sirfsoc_dma *sdma = ofdma->of_dma_data;
836 return dma_get_slave_channel(&sdma->channels[request].chan);
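
Lines 830-836 are the OF translation hook registered on line 951: the cell from the client's dmas property indexes straight into the channel array. A sketch with the bounds check and the channel-count constant assumed:

	static struct dma_chan *of_dma_sirfsoc_xlate(struct of_phandle_args *dma_spec,
						     struct of_dma *ofdma)
	{
		struct sirfsoc_dma *sdma = ofdma->of_dma_data;
		unsigned int request = dma_spec->args[0];

		if (request >= SIRFSOC_DMA_CHANNELS)	/* constant assumed */
			return NULL;

		/* Takes a reference on the channel if free, else NULL. */
		return dma_get_slave_channel(&sdma->channels[request].chan);
	}
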
844 struct sirfsoc_dma *sdma;
852 sdma = devm_kzalloc(dev, sizeof(*sdma), GFP_KERNEL);
853 if (!sdma)
859 sdma->exec_desc = data->exec;
860 sdma->type = data->type;
867 sdma->irq = irq_of_parse_and_map(dn, 0);
868 if (!sdma->irq) {
873 sdma->clk = devm_clk_get(dev, NULL);
874 if (IS_ERR(sdma->clk)) {
876 return PTR_ERR(sdma->clk);
888 sdma->base = devm_ioremap(dev, regs_start, regs_size);
889 if (!sdma->base) {
895 ret = request_irq(sdma->irq, &sirfsoc_dma_irq, 0, DRV_NAME, sdma);
902 dma = &sdma->dma;
926 schan = &sdma->channels[i];
941 tasklet_setup(&sdma->tasklet, sirfsoc_dma_tasklet);
944 dev_set_drvdata(dev, sdma);
951 ret = of_dma_controller_register(dn, of_dma_sirfsoc_xlate, sdma);
965 free_irq(sdma->irq, sdma);
967 irq_dispose_mapping(sdma->irq);
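
The probe path (lines 844-967) takes most resources via devm_* helpers, so only two acquisitions need explicit unwinding: request_irq() and the mapping created by irq_of_parse_and_map(). A condensed skeleton showing that error ordering; the elided steps and label placement are assumptions:

	static int sirfsoc_dma_probe(struct platform_device *op)
	{
		struct device *dev = &op->dev;
		struct device_node *dn = dev->of_node;
		struct sirfsoc_dma *sdma;
		int ret;

		sdma = devm_kzalloc(dev, sizeof(*sdma), GFP_KERNEL);
		if (!sdma)
			return -ENOMEM;

		sdma->irq = irq_of_parse_and_map(dn, 0);
		if (!sdma->irq)
			return -EINVAL;

		/* ... devm_clk_get(), devm_ioremap(), dma_device setup ... */

		ret = request_irq(sdma->irq, &sirfsoc_dma_irq, 0, DRV_NAME, sdma);
		if (ret)
			goto irq_dispose;

		tasklet_setup(&sdma->tasklet, sirfsoc_dma_tasklet);
		dev_set_drvdata(dev, sdma);

		/* ... dma_async_device_register() elided ... */

		ret = of_dma_controller_register(dn, of_dma_sirfsoc_xlate, sdma);
		if (ret)
			goto free_irq;

		return 0;

	free_irq:
		free_irq(sdma->irq, sdma);	/* undo request_irq() */
	irq_dispose:
		irq_dispose_mapping(sdma->irq);	/* undo irq_of_parse_and_map() */
		return ret;
	}
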
974 struct sirfsoc_dma *sdma = dev_get_drvdata(dev);
977 dma_async_device_unregister(&sdma->dma);
978 free_irq(sdma->irq, sdma);
979 tasklet_kill(&sdma->tasklet);
980 irq_dispose_mapping(sdma->irq);
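
remove (lines 974-980) tears probe down in reverse. The ordering detail worth keeping: tasklet_kill() runs after free_irq(), so the IRQ handler can no longer reschedule the tasklet while it is being killed. Remaining cleanup (OF controller unregistration, runtime PM disable) is elided:

	static int sirfsoc_dma_remove(struct platform_device *op)
	{
		struct device *dev = &op->dev;
		struct sirfsoc_dma *sdma = dev_get_drvdata(dev);

		dma_async_device_unregister(&sdma->dma);	/* no new clients */
		free_irq(sdma->irq, sdma);			/* no new IRQs */
		tasklet_kill(&sdma->tasklet);	/* safe: nothing reschedules it */
		irq_dispose_mapping(sdma->irq);
		return 0;
	}
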
990 struct sirfsoc_dma *sdma = dev_get_drvdata(dev);
992 clk_disable_unprepare(sdma->clk);
998 struct sirfsoc_dma *sdma = dev_get_drvdata(dev);
1001 ret = clk_prepare_enable(sdma->clk);
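
The runtime-PM callbacks (lines 990-1001) are pure clock gating, which is what makes the pm_runtime_get/put pairing in the channel alloc/free path above meaningful:

	static int sirfsoc_dma_runtime_suspend(struct device *dev)
	{
		struct sirfsoc_dma *sdma = dev_get_drvdata(dev);

		clk_disable_unprepare(sdma->clk);
		return 0;
	}

	static int sirfsoc_dma_runtime_resume(struct device *dev)
	{
		struct sirfsoc_dma *sdma = dev_get_drvdata(dev);
		int ret;

		ret = clk_prepare_enable(sdma->clk);
		if (ret < 0) {
			dev_err(dev, "clk_prepare_enable failed: %d\n", ret);
			return ret;
		}
		return 0;
	}
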
1011 struct sirfsoc_dma *sdma = dev_get_drvdata(dev);
1012 struct sirfsoc_dma_regs *save = &sdma->regs_save;
1029 if (sdma->type == SIRFSOC_DMA_VER_A7V2) {
1042 schan = &sdma->channels[ch];
1045 save->ctrl[ch] = readl_relaxed(sdma->base +
1048 save->interrupt_en = readl_relaxed(sdma->base + int_offset);
1058 struct sirfsoc_dma *sdma = dev_get_drvdata(dev);
1059 struct sirfsoc_dma_regs *save = &sdma->regs_save;
1073 if (sdma->type == SIRFSOC_DMA_VER_A7V2) {
1083 writel_relaxed(save->interrupt_en, sdma->base + int_offset);
1085 schan = &sdma->channels[ch];
1092 sdma->base + width_offset + ch * 4);
1094 sdma->base + ch * 0x10 + SIRFSOC_DMA_CH_XLEN);
1096 sdma->base + ch * 0x10 + SIRFSOC_DMA_CH_YLEN);
1098 sdma->base + ch * 0x10 + SIRFSOC_DMA_CH_CTRL);
1099 if (sdma->type == SIRFSOC_DMA_VER_A7V2) {
1101 sdma->base + SIRFSOC_DMA_CH_ADDR);
1104 sdma->base + ch * 0x10 + SIRFSOC_DMA_CH_ADDR);
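
The system-sleep hooks (lines 1011-1104) save the interrupt-enable state and per-channel CTRL into sdma->regs_save, then reprogram everything on resume, treating the register file as lost across suspend. A sketch of the restore loop implied by lines 1083-1104: per-channel registers sit at a 0x10 stride, the width bank at a 4-byte stride, and A7V2 has a single shared address register written as a byte address where legacy parts take a word address (the >> 2 is inferred from the << 2 on line 677). The offset parameters, the saved/descriptor field names, and the sirfsoc_active_desc() helper are all assumptions:

	static void sirfsoc_dma_restore_regs(struct sirfsoc_dma *sdma,
					     u32 int_offset, u32 width_offset)
	{
		struct sirfsoc_dma_regs *save = &sdma->regs_save;
		int ch;

		writel_relaxed(save->interrupt_en, sdma->base + int_offset);

		for (ch = 0; ch < sdma->dma.chancnt; ch++) {
			struct sirfsoc_dma_chan *schan = &sdma->channels[ch];
			/* hypothetical helper: the channel's active descriptor */
			struct sirfsoc_dma_desc *sdesc = sirfsoc_active_desc(schan);

			writel_relaxed(sdesc->width,
				       sdma->base + width_offset + ch * 4);
			writel_relaxed(sdesc->xlen,
				       sdma->base + ch * 0x10 + SIRFSOC_DMA_CH_XLEN);
			writel_relaxed(sdesc->ylen,
				       sdma->base + ch * 0x10 + SIRFSOC_DMA_CH_YLEN);
			writel_relaxed(save->ctrl[ch],
				       sdma->base + ch * 0x10 + SIRFSOC_DMA_CH_CTRL);
			if (sdma->type == SIRFSOC_DMA_VER_A7V2)
				writel_relaxed(sdesc->addr,
					       sdma->base + SIRFSOC_DMA_CH_ADDR);
			else
				writel_relaxed(sdesc->addr >> 2, sdma->base +
					       ch * 0x10 + SIRFSOC_DMA_CH_ADDR);
		}
	}
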