// SPDX-License-Identifier: GPL-2.0-only
/*
 *
 * Copyright (C) STMicroelectronics SA 2017
 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 *
 * Driver for STM32 MDMA controller
 *
 * Inspired by stm32-dma.c and dma-jz4780.c
 */
12
13#include <linux/bitfield.h>
14#include <linux/clk.h>
15#include <linux/delay.h>
16#include <linux/dmaengine.h>
17#include <linux/dma-mapping.h>
18#include <linux/dmapool.h>
19#include <linux/err.h>
20#include <linux/init.h>
21#include <linux/iopoll.h>
22#include <linux/jiffies.h>
23#include <linux/list.h>
24#include <linux/log2.h>
25#include <linux/module.h>
26#include <linux/of.h>
27#include <linux/of_dma.h>
28#include <linux/platform_device.h>
29#include <linux/pm_runtime.h>
30#include <linux/reset.h>
31#include <linux/slab.h>
32
33#include "virt-dma.h"
34
#define STM32_MDMA_GISR0		0x0000 /* MDMA Int Status Reg 0 */
36
37/* MDMA Channel x interrupt/status register */
38#define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x)) /* x = 0..62 */
39#define STM32_MDMA_CISR_CRQA		BIT(16)
40#define STM32_MDMA_CISR_TCIF		BIT(4)
41#define STM32_MDMA_CISR_BTIF		BIT(3)
42#define STM32_MDMA_CISR_BRTIF		BIT(2)
43#define STM32_MDMA_CISR_CTCIF		BIT(1)
44#define STM32_MDMA_CISR_TEIF		BIT(0)
45
46/* MDMA Channel x interrupt flag clear register */
47#define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
48#define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
49#define STM32_MDMA_CIFCR_CBTIF		BIT(3)
50#define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
51#define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
52#define STM32_MDMA_CIFCR_CTEIF		BIT(0)
53#define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
54					| STM32_MDMA_CIFCR_CBTIF \
55					| STM32_MDMA_CIFCR_CBRTIF \
56					| STM32_MDMA_CIFCR_CCTCIF \
57					| STM32_MDMA_CIFCR_CTEIF)
58
59/* MDMA Channel x error status register */
60#define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
61#define STM32_MDMA_CESR_BSE		BIT(11)
62#define STM32_MDMA_CESR_ASR		BIT(10)
63#define STM32_MDMA_CESR_TEMD		BIT(9)
64#define STM32_MDMA_CESR_TELD		BIT(8)
65#define STM32_MDMA_CESR_TED		BIT(7)
66#define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)
67
68/* MDMA Channel x control register */
69#define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
70#define STM32_MDMA_CCR_SWRQ		BIT(16)
71#define STM32_MDMA_CCR_WEX		BIT(14)
72#define STM32_MDMA_CCR_HEX		BIT(13)
73#define STM32_MDMA_CCR_BEX		BIT(12)
74#define STM32_MDMA_CCR_SM		BIT(8)
75#define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
76#define STM32_MDMA_CCR_PL(n)		FIELD_PREP(STM32_MDMA_CCR_PL_MASK, (n))
77#define STM32_MDMA_CCR_TCIE		BIT(5)
78#define STM32_MDMA_CCR_BTIE		BIT(4)
79#define STM32_MDMA_CCR_BRTIE		BIT(3)
80#define STM32_MDMA_CCR_CTCIE		BIT(2)
81#define STM32_MDMA_CCR_TEIE		BIT(1)
82#define STM32_MDMA_CCR_EN		BIT(0)
83#define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
84					| STM32_MDMA_CCR_BTIE \
85					| STM32_MDMA_CCR_BRTIE \
86					| STM32_MDMA_CCR_CTCIE \
87					| STM32_MDMA_CCR_TEIE)
88
89/* MDMA Channel x transfer configuration register */
90#define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
91#define STM32_MDMA_CTCR_BWM		BIT(31)
92#define STM32_MDMA_CTCR_SWRM		BIT(30)
93#define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
94#define STM32_MDMA_CTCR_TRGM(n)		FIELD_PREP(STM32_MDMA_CTCR_TRGM_MSK, (n))
95#define STM32_MDMA_CTCR_TRGM_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TRGM_MSK, (n))
96#define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
97#define STM32_MDMA_CTCR_PAM(n)		FIELD_PREP(STM32_MDMA_CTCR_PAM_MASK, (n))
98#define STM32_MDMA_CTCR_PKE		BIT(25)
99#define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
100#define STM32_MDMA_CTCR_TLEN(n)		FIELD_PREP(STM32_MDMA_CTCR_TLEN_MSK, (n))
101#define STM32_MDMA_CTCR_TLEN_GET(n)	FIELD_GET(STM32_MDMA_CTCR_TLEN_MSK, (n))
102#define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
103#define STM32_MDMA_CTCR_LEN2(n)		FIELD_PREP(STM32_MDMA_CTCR_LEN2_MSK, (n))
104#define STM32_MDMA_CTCR_LEN2_GET(n)	FIELD_GET(STM32_MDMA_CTCR_LEN2_MSK, (n))
105#define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
106#define STM32_MDMA_CTCR_DBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_DBURST_MASK, (n))
107#define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
108#define STM32_MDMA_CTCR_SBURST(n)	FIELD_PREP(STM32_MDMA_CTCR_SBURST_MASK, (n))
109#define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
110#define STM32_MDMA_CTCR_DINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_DINCOS_MASK, (n))
111#define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
112#define STM32_MDMA_CTCR_SINCOS(n)	FIELD_PREP(STM32_MDMA_CTCR_SINCOS_MASK, (n))
113#define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
114#define STM32_MDMA_CTCR_DSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_DSIZE_MASK, (n))
115#define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
116#define STM32_MDMA_CTCR_SSIZE(n)	FIELD_PREP(STM32_MDMA_CTCR_SSIZE_MASK, (n))
117#define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
118#define STM32_MDMA_CTCR_DINC(n)		FIELD_PREP(STM32_MDMA_CTCR_DINC_MASK, (n))
119#define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
120#define STM32_MDMA_CTCR_SINC(n)		FIELD_PREP(STM32_MDMA_CTCR_SINC_MASK, (n))
121#define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
122					| STM32_MDMA_CTCR_DINC_MASK \
123					| STM32_MDMA_CTCR_SINCOS_MASK \
124					| STM32_MDMA_CTCR_DINCOS_MASK \
125					| STM32_MDMA_CTCR_LEN2_MSK \
126					| STM32_MDMA_CTCR_TRGM_MSK)
127
128/* MDMA Channel x block number of data register */
129#define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
130#define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
131#define STM32_MDMA_CBNDTR_BRC(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BRC_MK, (n))
132#define STM32_MDMA_CBNDTR_BRC_GET(n)	FIELD_GET(STM32_MDMA_CBNDTR_BRC_MK, (n))
133
134#define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
135#define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
136#define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
137#define STM32_MDMA_CBNDTR_BNDT(n)	FIELD_PREP(STM32_MDMA_CBNDTR_BNDT_MASK, (n))
138
139/* MDMA Channel x source address register */
140#define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))
141
142/* MDMA Channel x destination address register */
143#define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))
144
145/* MDMA Channel x block repeat address update register */
146#define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
147#define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
148#define STM32_MDMA_CBRUR_DUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_DUV_MASK, (n))
149#define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
150#define STM32_MDMA_CBRUR_SUV(n)		FIELD_PREP(STM32_MDMA_CBRUR_SUV_MASK, (n))
151
152/* MDMA Channel x link address register */
153#define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))
154
155/* MDMA Channel x trigger and bus selection register */
156#define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
157#define STM32_MDMA_CTBR_DBUS		BIT(17)
158#define STM32_MDMA_CTBR_SBUS		BIT(16)
159#define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(5, 0)
160#define STM32_MDMA_CTBR_TSEL(n)		FIELD_PREP(STM32_MDMA_CTBR_TSEL_MASK, (n))
161
162/* MDMA Channel x mask address register */
163#define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))
164
165/* MDMA Channel x mask data register */
166#define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))
167
168#define STM32_MDMA_MAX_BUF_LEN		128
169#define STM32_MDMA_MAX_BLOCK_LEN	65536
170#define STM32_MDMA_MAX_CHANNELS		32
171#define STM32_MDMA_MAX_REQUESTS		256
172#define STM32_MDMA_MAX_BURST		128
173#define STM32_MDMA_VERY_HIGH_PRIORITY	0x3
174
175enum stm32_mdma_trigger_mode {
176	STM32_MDMA_BUFFER,
177	STM32_MDMA_BLOCK,
178	STM32_MDMA_BLOCK_REP,
179	STM32_MDMA_LINKED_LIST,
180};
181
182enum stm32_mdma_width {
183	STM32_MDMA_BYTE,
184	STM32_MDMA_HALF_WORD,
185	STM32_MDMA_WORD,
186	STM32_MDMA_DOUBLE_WORD,
187};
188
189enum stm32_mdma_inc_mode {
190	STM32_MDMA_FIXED = 0,
191	STM32_MDMA_INC = 2,
192	STM32_MDMA_DEC = 3,
193};
194
195struct stm32_mdma_chan_config {
196	u32 request;
197	u32 priority_level;
198	u32 transfer_config;
199	u32 mask_addr;
200	u32 mask_data;
201	bool m2m_hw; /* True when MDMA is triggered by STM32 DMA */
202};
203
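/*
 * Hardware descriptor: mirrors the channel register set (CTCR to CMDR) so
 * that the MDMA can fetch it in linked-list mode, CLAR pointing to the
 * next descriptor of the chain.
 */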
204struct stm32_mdma_hwdesc {
205	u32 ctcr;
206	u32 cbndtr;
207	u32 csar;
208	u32 cdar;
209	u32 cbrur;
210	u32 clar;
211	u32 ctbr;
212	u32 dummy;
213	u32 cmar;
214	u32 cmdr;
215} __aligned(64);
216
217struct stm32_mdma_desc_node {
218	struct stm32_mdma_hwdesc *hwdesc;
219	dma_addr_t hwdesc_phys;
220};
221
222struct stm32_mdma_desc {
223	struct virt_dma_desc vdesc;
224	u32 ccr;
225	bool cyclic;
226	u32 count;
227	struct stm32_mdma_desc_node node[];
228};
229
230struct stm32_mdma_dma_config {
231	u32 request;	/* STM32 DMA channel stream id, triggering MDMA */
232	u32 cmar;	/* STM32 DMA interrupt flag clear register address */
233	u32 cmdr;	/* STM32 DMA Transfer Complete flag */
234};
235
236struct stm32_mdma_chan {
237	struct virt_dma_chan vchan;
238	struct dma_pool *desc_pool;
239	u32 id;
240	struct stm32_mdma_desc *desc;
241	u32 curr_hwdesc;
242	struct dma_slave_config dma_config;
243	struct stm32_mdma_chan_config chan_config;
244	bool busy;
245	u32 mem_burst;
246	u32 mem_width;
247};
248
249struct stm32_mdma_device {
250	struct dma_device ddev;
251	void __iomem *base;
252	struct clk *clk;
253	int irq;
254	u32 nr_channels;
255	u32 nr_requests;
256	u32 nr_ahb_addr_masks;
257	u32 chan_reserved;
258	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
259	u32 ahb_addr_masks[];
260};
261
262static struct stm32_mdma_device *stm32_mdma_get_dev(
263	struct stm32_mdma_chan *chan)
264{
265	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
266			    ddev);
267}
268
269static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
270{
271	return container_of(c, struct stm32_mdma_chan, vchan.chan);
272}
273
274static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
275{
276	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
277}
278
279static struct device *chan2dev(struct stm32_mdma_chan *chan)
280{
281	return &chan->vchan.chan.dev->device;
282}
283
284static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
285{
286	return mdma_dev->ddev.dev;
287}
288
289static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
290{
291	return readl_relaxed(dmadev->base + reg);
292}
293
294static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
295{
296	writel_relaxed(val, dmadev->base + reg);
297}
298
299static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
300				u32 mask)
301{
302	void __iomem *addr = dmadev->base + reg;
303
304	writel_relaxed(readl_relaxed(addr) | mask, addr);
305}
306
307static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
308				u32 mask)
309{
310	void __iomem *addr = dmadev->base + reg;
311
312	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
313}
314
315static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
316		struct stm32_mdma_chan *chan, u32 count)
317{
318	struct stm32_mdma_desc *desc;
319	int i;
320
321	desc = kzalloc(struct_size(desc, node, count), GFP_NOWAIT);
322	if (!desc)
323		return NULL;
324
325	for (i = 0; i < count; i++) {
326		desc->node[i].hwdesc =
327			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
328				       &desc->node[i].hwdesc_phys);
329		if (!desc->node[i].hwdesc)
330			goto err;
331	}
332
333	desc->count = count;
334
335	return desc;
336
337err:
338	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
339	while (--i >= 0)
340		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
341			      desc->node[i].hwdesc_phys);
342	kfree(desc);
343	return NULL;
344}
345
346static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
347{
348	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
349	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
350	int i;
351
352	for (i = 0; i < desc->count; i++)
353		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
354			      desc->node[i].hwdesc_phys);
355	kfree(desc);
356}
357
358static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
359				enum dma_slave_buswidth width)
360{
361	switch (width) {
362	case DMA_SLAVE_BUSWIDTH_1_BYTE:
363	case DMA_SLAVE_BUSWIDTH_2_BYTES:
364	case DMA_SLAVE_BUSWIDTH_4_BYTES:
365	case DMA_SLAVE_BUSWIDTH_8_BYTES:
366		return ffs(width) - 1;
367	default:
368		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
369			width);
370		return -EINVAL;
371	}
372}
373
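/*
 * Return the widest bus width (up to 8 bytes) for which both the address
 * and the buffer length are aligned and which does not exceed the transfer
 * length (tlen).
 */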
374static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
375							u32 buf_len, u32 tlen)
376{
377	enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
378
379	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
380	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
381	     max_width >>= 1) {
382		/*
383		 * Address and buffer length both have to be aligned on
384		 * bus width
385		 */
386		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
387		    tlen >= max_width)
388			break;
389	}
390
391	return max_width;
392}
393
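/*
 * Return the largest burst (in beats) allowed by the alignment of the
 * buffer and transfer lengths, capped by max_burst, for the given width.
 */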
394static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
395				     enum dma_slave_buswidth width)
396{
397	u32 best_burst;
398
399	best_burst = min((u32)1 << __ffs(tlen | buf_len),
400			 max_burst * width) / width;
401
402	return (best_burst > 0) ? best_burst : 1;
403}
404
405static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
406{
407	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
408	u32 ccr, cisr, id, reg;
409	int ret;
410
411	id = chan->id;
412	reg = STM32_MDMA_CCR(id);
413
414	/* Disable interrupts */
415	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);
416
417	ccr = stm32_mdma_read(dmadev, reg);
418	if (ccr & STM32_MDMA_CCR_EN) {
419		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);
420
421		/* Ensure that any ongoing transfer has been completed */
422		ret = readl_relaxed_poll_timeout_atomic(
423				dmadev->base + STM32_MDMA_CISR(id), cisr,
424				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
425		if (ret) {
426			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
427			return -EBUSY;
428		}
429	}
430
431	return 0;
432}
433
434static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
435{
436	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
437	u32 status;
438	int ret;
439
440	/* Disable DMA */
441	ret = stm32_mdma_disable_chan(chan);
442	if (ret < 0)
443		return;
444
445	/* Clear interrupt status if it is there */
446	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
447	if (status) {
448		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
449			__func__, status);
450		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
451	}
452
453	chan->busy = false;
454}
455
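/*
 * Set the AHB bus bit of CTBR when the address falls within one of the
 * ranges given by the "st,ahb-addr-masks" DT property, otherwise leave it
 * cleared so that the AXI bus is used.
 */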
456static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
457			       u32 ctbr_mask, u32 src_addr)
458{
459	u32 mask;
460	int i;
461
462	/* Check if memory device is on AHB or AXI */
463	*ctbr &= ~ctbr_mask;
464	mask = src_addr & 0xF0000000;
465	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
466		if (mask == dmadev->ahb_addr_masks[i]) {
467			*ctbr |= ctbr_mask;
468			break;
469		}
470	}
471}
472
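/*
 * Compute the CCR, CTCR and CTBR values for a device transfer: apply the
 * DT-provided transfer configuration, then derive bus widths and burst
 * sizes from the slave config and from the memory address and length.
 */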
473static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
474				     enum dma_transfer_direction direction,
475				     u32 *mdma_ccr, u32 *mdma_ctcr,
476				     u32 *mdma_ctbr, dma_addr_t addr,
477				     u32 buf_len)
478{
479	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
480	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
481	enum dma_slave_buswidth src_addr_width, dst_addr_width;
482	phys_addr_t src_addr, dst_addr;
483	int src_bus_width, dst_bus_width;
484	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
485	u32 ccr, ctcr, ctbr, tlen;
486
487	src_addr_width = chan->dma_config.src_addr_width;
488	dst_addr_width = chan->dma_config.dst_addr_width;
489	src_maxburst = chan->dma_config.src_maxburst;
490	dst_maxburst = chan->dma_config.dst_maxburst;
491
492	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
493	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
494	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
495
496	/* Enable HW request mode */
497	ctcr &= ~STM32_MDMA_CTCR_SWRM;
498
	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
500	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
501	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;
502
	/*
	 * The buffer transfer length (TLEN) field of CTCR must be programmed
	 * with the number of bytes - 1
	 */
507	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
508	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
509	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
510
511	/* Disable Pack Enable */
512	ctcr &= ~STM32_MDMA_CTCR_PKE;
513
514	/* Check burst size constraints */
515	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
516	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
517		dev_err(chan2dev(chan),
518			"burst size * bus width higher than %d bytes\n",
519			STM32_MDMA_MAX_BURST);
520		return -EINVAL;
521	}
522
523	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
524	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
525		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
526		return -EINVAL;
527	}
528
529	/*
530	 * Configure channel control:
531	 * - Clear SW request as in this case this is a HW one
532	 * - Clear WEX, HEX and BEX bits
533	 * - Set priority level
534	 */
535	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
536		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
537	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);
538
539	/* Configure Trigger selection */
540	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
541	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);
542
543	switch (direction) {
544	case DMA_MEM_TO_DEV:
545		dst_addr = chan->dma_config.dst_addr;
546
547		/* Set device data size */
548		if (chan_config->m2m_hw)
549			dst_addr_width = stm32_mdma_get_max_width(dst_addr, buf_len,
550								  STM32_MDMA_MAX_BUF_LEN);
551		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
552		if (dst_bus_width < 0)
553			return dst_bus_width;
554		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
555		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);
556		if (chan_config->m2m_hw) {
557			ctcr &= ~STM32_MDMA_CTCR_DINCOS_MASK;
558			ctcr |= STM32_MDMA_CTCR_DINCOS(dst_bus_width);
559		}
560
561		/* Set device burst value */
562		if (chan_config->m2m_hw)
563			dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
564
565		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
566							   dst_maxburst,
567							   dst_addr_width);
568		chan->mem_burst = dst_best_burst;
569		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
570		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
571
572		/* Set memory data size */
573		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
574		chan->mem_width = src_addr_width;
575		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
576		if (src_bus_width < 0)
577			return src_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
			  STM32_MDMA_CTCR_SINCOS_MASK);
580		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
581			STM32_MDMA_CTCR_SINCOS(src_bus_width);
582
583		/* Set memory burst value */
584		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
585		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
586							   src_maxburst,
587							   src_addr_width);
588		chan->mem_burst = src_best_burst;
589		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
590		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
591
592		/* Select bus */
593		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
594				   dst_addr);
595
596		if (dst_bus_width != src_bus_width)
597			ctcr |= STM32_MDMA_CTCR_PKE;
598
599		/* Set destination address */
600		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
601		break;
602
603	case DMA_DEV_TO_MEM:
604		src_addr = chan->dma_config.src_addr;
605
606		/* Set device data size */
607		if (chan_config->m2m_hw)
608			src_addr_width = stm32_mdma_get_max_width(src_addr, buf_len,
609								  STM32_MDMA_MAX_BUF_LEN);
610
611		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
612		if (src_bus_width < 0)
613			return src_bus_width;
614		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
615		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);
616		if (chan_config->m2m_hw) {
617			ctcr &= ~STM32_MDMA_CTCR_SINCOS_MASK;
618			ctcr |= STM32_MDMA_CTCR_SINCOS(src_bus_width);
619		}
620
621		/* Set device burst value */
622		if (chan_config->m2m_hw)
623			src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
624
625		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
626							   src_maxburst,
627							   src_addr_width);
628		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
629		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
630
631		/* Set memory data size */
632		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
633		chan->mem_width = dst_addr_width;
634		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
635		if (dst_bus_width < 0)
636			return dst_bus_width;
637		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
638			STM32_MDMA_CTCR_DINCOS_MASK);
639		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
640			STM32_MDMA_CTCR_DINCOS(dst_bus_width);
641
642		/* Set memory burst value */
643		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
644		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
645							   dst_maxburst,
646							   dst_addr_width);
647		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
648		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
649
650		/* Select bus */
651		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
652				   src_addr);
653
654		if (dst_bus_width != src_bus_width)
655			ctcr |= STM32_MDMA_CTCR_PKE;
656
657		/* Set source address */
658		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
659		break;
660
661	default:
662		dev_err(chan2dev(chan), "Dma direction is not supported\n");
663		return -EINVAL;
664	}
665
666	*mdma_ccr = ccr;
667	*mdma_ctcr = ctcr;
668	*mdma_ctbr = ctbr;
669
670	return 0;
671}
672
673static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
674				   struct stm32_mdma_desc_node *node)
675{
676	dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
677	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
678	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
679	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
680	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
681	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
682	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
683	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
684	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
685	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
686}
687
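/*
 * Fill one hardware descriptor of the list and link it to the next one, or
 * back to the first one when the transfer is cyclic.
 */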
688static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
689				    struct stm32_mdma_desc *desc,
690				    enum dma_transfer_direction dir, u32 count,
691				    dma_addr_t src_addr, dma_addr_t dst_addr,
692				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
693				    bool is_first, bool is_cyclic)
694{
695	struct stm32_mdma_chan_config *config = &chan->chan_config;
696	struct stm32_mdma_hwdesc *hwdesc;
697	u32 next = count + 1;
698
699	hwdesc = desc->node[count].hwdesc;
700	hwdesc->ctcr = ctcr;
701	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
702			STM32_MDMA_CBNDTR_BRDUM |
703			STM32_MDMA_CBNDTR_BRSUM |
704			STM32_MDMA_CBNDTR_BNDT_MASK);
705	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
706	hwdesc->csar = src_addr;
707	hwdesc->cdar = dst_addr;
708	hwdesc->cbrur = 0;
709	hwdesc->ctbr = ctbr;
710	hwdesc->cmar = config->mask_addr;
711	hwdesc->cmdr = config->mask_data;
712
713	if (is_last) {
714		if (is_cyclic)
715			hwdesc->clar = desc->node[0].hwdesc_phys;
716		else
717			hwdesc->clar = 0;
718	} else {
719		hwdesc->clar = desc->node[next].hwdesc_phys;
720	}
721
722	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
723}
724
725static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
726				 struct stm32_mdma_desc *desc,
727				 struct scatterlist *sgl, u32 sg_len,
728				 enum dma_transfer_direction direction)
729{
730	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
731	struct dma_slave_config *dma_config = &chan->dma_config;
732	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
733	struct scatterlist *sg;
734	dma_addr_t src_addr, dst_addr;
735	u32 m2m_hw_period, ccr, ctcr, ctbr;
736	int i, ret = 0;
737
738	if (chan_config->m2m_hw)
739		m2m_hw_period = sg_dma_len(sgl);
740
741	for_each_sg(sgl, sg, sg_len, i) {
742		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
743			dev_err(chan2dev(chan), "Invalid block len\n");
744			return -EINVAL;
745		}
746
747		if (direction == DMA_MEM_TO_DEV) {
748			src_addr = sg_dma_address(sg);
749			dst_addr = dma_config->dst_addr;
750			if (chan_config->m2m_hw && (i & 1))
751				dst_addr += m2m_hw_period;
752			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
753							&ctcr, &ctbr, src_addr,
754							sg_dma_len(sg));
755			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
756					   src_addr);
757		} else {
758			src_addr = dma_config->src_addr;
759			if (chan_config->m2m_hw && (i & 1))
760				src_addr += m2m_hw_period;
761			dst_addr = sg_dma_address(sg);
762			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
763							&ctcr, &ctbr, dst_addr,
764							sg_dma_len(sg));
765			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
766					   dst_addr);
767		}
768
769		if (ret < 0)
770			return ret;
771
772		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
773					dst_addr, sg_dma_len(sg), ctcr, ctbr,
774					i == sg_len - 1, i == 0, false);
775	}
776
777	/* Enable interrupts */
778	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
779	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
780	desc->ccr = ccr;
781
782	return 0;
783}
784
785static struct dma_async_tx_descriptor *
786stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
787			 u32 sg_len, enum dma_transfer_direction direction,
788			 unsigned long flags, void *context)
789{
790	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
791	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
792	struct stm32_mdma_desc *desc;
793	int i, ret;
794
	/*
	 * Once the channel has been set up for a cyclic transfer, it cannot
	 * take another request. The DMA channel needs to be aborted or
	 * terminated before a new request can be issued.
	 */
800	if (chan->desc && chan->desc->cyclic) {
801		dev_err(chan2dev(chan),
802			"Request not allowed when dma in cyclic mode\n");
803		return NULL;
804	}
805
806	desc = stm32_mdma_alloc_desc(chan, sg_len);
807	if (!desc)
808		return NULL;
809
810	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
811	if (ret < 0)
812		goto xfer_setup_err;
813
	/*
	 * For M2M HW transfers triggered by STM32 DMA, the transfer complete
	 * flag must not be cleared by hardware (CMAR/CMDR left at 0), so that
	 * the CPU can rearm the STM32 DMA with the next sg element and update
	 * some data in the dmaengine framework.
	 */
819	if (chan_config->m2m_hw && direction == DMA_MEM_TO_DEV) {
820		struct stm32_mdma_hwdesc *hwdesc;
821
822		for (i = 0; i < sg_len; i++) {
823			hwdesc = desc->node[i].hwdesc;
824			hwdesc->cmar = 0;
825			hwdesc->cmdr = 0;
826		}
827	}
828
829	desc->cyclic = false;
830
831	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
832
833xfer_setup_err:
834	for (i = 0; i < desc->count; i++)
835		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
836			      desc->node[i].hwdesc_phys);
837	kfree(desc);
838	return NULL;
839}
840
841static struct dma_async_tx_descriptor *
842stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
843			   size_t buf_len, size_t period_len,
844			   enum dma_transfer_direction direction,
845			   unsigned long flags)
846{
847	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
848	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
849	struct dma_slave_config *dma_config = &chan->dma_config;
850	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
851	struct stm32_mdma_desc *desc;
852	dma_addr_t src_addr, dst_addr;
853	u32 ccr, ctcr, ctbr, count;
854	int i, ret;
855
	/*
	 * Once the channel has been set up for a cyclic transfer, it cannot
	 * take another request. The DMA channel needs to be aborted or
	 * terminated before a new request can be issued.
	 */
861	if (chan->desc && chan->desc->cyclic) {
862		dev_err(chan2dev(chan),
863			"Request not allowed when dma in cyclic mode\n");
864		return NULL;
865	}
866
867	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
868		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
869		return NULL;
870	}
871
872	if (buf_len % period_len) {
873		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
874		return NULL;
875	}
876
877	count = buf_len / period_len;
878
879	desc = stm32_mdma_alloc_desc(chan, count);
880	if (!desc)
881		return NULL;
882
883	/* Select bus */
884	if (direction == DMA_MEM_TO_DEV) {
885		src_addr = buf_addr;
886		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
887						&ctbr, src_addr, period_len);
888		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
889				   src_addr);
890	} else {
891		dst_addr = buf_addr;
892		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
893						&ctbr, dst_addr, period_len);
894		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
895				   dst_addr);
896	}
897
898	if (ret < 0)
899		goto xfer_setup_err;
900
901	/* Enable interrupts */
902	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
903	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
904	desc->ccr = ccr;
905
906	/* Configure hwdesc list */
907	for (i = 0; i < count; i++) {
908		if (direction == DMA_MEM_TO_DEV) {
909			src_addr = buf_addr + i * period_len;
910			dst_addr = dma_config->dst_addr;
911			if (chan_config->m2m_hw && (i & 1))
912				dst_addr += period_len;
913		} else {
914			src_addr = dma_config->src_addr;
915			if (chan_config->m2m_hw && (i & 1))
916				src_addr += period_len;
917			dst_addr = buf_addr + i * period_len;
918		}
919
920		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
921					dst_addr, period_len, ctcr, ctbr,
922					i == count - 1, i == 0, true);
923	}
924
925	desc->cyclic = true;
926
927	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
928
929xfer_setup_err:
930	for (i = 0; i < desc->count; i++)
931		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
932			      desc->node[i].hwdesc_phys);
933	kfree(desc);
934	return NULL;
935}
936
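/*
 * Memcpy transfers use software request mode: a single buffer or block
 * transfer when the length fits in one block, otherwise a linked list of
 * blocks of up to STM32_MDMA_MAX_BLOCK_LEN bytes each.
 */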
937static struct dma_async_tx_descriptor *
938stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
939			   size_t len, unsigned long flags)
940{
941	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
942	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
943	enum dma_slave_buswidth max_width;
944	struct stm32_mdma_desc *desc;
945	struct stm32_mdma_hwdesc *hwdesc;
946	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
947	u32 best_burst, tlen;
948	size_t xfer_count, offset;
949	int src_bus_width, dst_bus_width;
950	int i;
951
	/*
	 * Once the channel has been set up for a cyclic transfer, it cannot
	 * take another request. The DMA channel needs to be aborted or
	 * terminated before a new request can be issued.
	 */
957	if (chan->desc && chan->desc->cyclic) {
958		dev_err(chan2dev(chan),
959			"Request not allowed when dma in cyclic mode\n");
960		return NULL;
961	}
962
963	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
964	desc = stm32_mdma_alloc_desc(chan, count);
965	if (!desc)
966		return NULL;
967
968	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
969	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
970	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
971	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
972
973	/* Enable sw req, some interrupts and clear other bits */
974	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
975		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
976		 STM32_MDMA_CCR_IRQ_MASK);
977	ccr |= STM32_MDMA_CCR_TEIE;
978
979	/* Enable SW request mode, dest/src inc and clear other bits */
980	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
981		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
982		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
983		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
984		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
985		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
986		  STM32_MDMA_CTCR_SINC_MASK);
987	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
988		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);
989
990	/* Reset HW request */
991	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
992
993	/* Select bus */
994	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
995	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);
996
997	/* Clear CBNDTR registers */
998	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
999			STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);
1000
1001	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
1002		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
1003		if (len <= STM32_MDMA_MAX_BUF_LEN) {
1004			/* Setup a buffer transfer */
1005			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
1006			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
1007		} else {
1008			/* Setup a block transfer */
1009			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
1010			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
1011		}
1012
1013		tlen = STM32_MDMA_MAX_BUF_LEN;
1014		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
1015
1016		/* Set source best burst size */
1017		max_width = stm32_mdma_get_max_width(src, len, tlen);
1018		src_bus_width = stm32_mdma_get_width(chan, max_width);
1019
1020		max_burst = tlen / max_width;
1021		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
1022						       max_width);
1023		mdma_burst = ilog2(best_burst);
1024
1025		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
1026			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
1027			STM32_MDMA_CTCR_SINCOS(src_bus_width);
1028
1029		/* Set destination best burst size */
1030		max_width = stm32_mdma_get_max_width(dest, len, tlen);
1031		dst_bus_width = stm32_mdma_get_width(chan, max_width);
1032
1033		max_burst = tlen / max_width;
1034		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
1035						       max_width);
1036		mdma_burst = ilog2(best_burst);
1037
1038		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
1039			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
1040			STM32_MDMA_CTCR_DINCOS(dst_bus_width);
1041
1042		if (dst_bus_width != src_bus_width)
1043			ctcr |= STM32_MDMA_CTCR_PKE;
1044
1045		/* Prepare hardware descriptor */
1046		hwdesc = desc->node[0].hwdesc;
1047		hwdesc->ctcr = ctcr;
1048		hwdesc->cbndtr = cbndtr;
1049		hwdesc->csar = src;
1050		hwdesc->cdar = dest;
1051		hwdesc->cbrur = 0;
1052		hwdesc->clar = 0;
1053		hwdesc->ctbr = ctbr;
1054		hwdesc->cmar = 0;
1055		hwdesc->cmdr = 0;
1056
1057		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
1058	} else {
1059		/* Setup a LLI transfer */
1060		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
1061			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
1062		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
1063		tlen = STM32_MDMA_MAX_BUF_LEN;
1064
1065		for (i = 0, offset = 0; offset < len;
1066		     i++, offset += xfer_count) {
1067			xfer_count = min_t(size_t, len - offset,
1068					   STM32_MDMA_MAX_BLOCK_LEN);
1069
1070			/* Set source best burst size */
1071			max_width = stm32_mdma_get_max_width(src, len, tlen);
1072			src_bus_width = stm32_mdma_get_width(chan, max_width);
1073
1074			max_burst = tlen / max_width;
1075			best_burst = stm32_mdma_get_best_burst(len, tlen,
1076							       max_burst,
1077							       max_width);
1078			mdma_burst = ilog2(best_burst);
1079
1080			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
1081				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
1082				STM32_MDMA_CTCR_SINCOS(src_bus_width);
1083
1084			/* Set destination best burst size */
1085			max_width = stm32_mdma_get_max_width(dest, len, tlen);
1086			dst_bus_width = stm32_mdma_get_width(chan, max_width);
1087
1088			max_burst = tlen / max_width;
1089			best_burst = stm32_mdma_get_best_burst(len, tlen,
1090							       max_burst,
1091							       max_width);
1092			mdma_burst = ilog2(best_burst);
1093
1094			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
1095				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
1096				STM32_MDMA_CTCR_DINCOS(dst_bus_width);
1097
1098			if (dst_bus_width != src_bus_width)
1099				ctcr |= STM32_MDMA_CTCR_PKE;
1100
1101			/* Prepare hardware descriptor */
1102			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
1103						src + offset, dest + offset,
1104						xfer_count, ctcr, ctbr,
1105						i == count - 1, i == 0, false);
1106		}
1107	}
1108
1109	desc->ccr = ccr;
1110
1111	desc->cyclic = false;
1112
1113	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
1114}
1115
1116static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
1117{
1118	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1119
1120	dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
1121		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
1122	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
1123		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
1124	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
1125		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
1126	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
1127		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
1128	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
1129		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
1130	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
1131		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
1132	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
1133		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
1134	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
1135		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
1136	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
1137		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
1138	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
1139		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
1140}
1141
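/*
 * Program the channel registers from the first hardware descriptor of the
 * next pending virtual descriptor, then enable the channel. A software
 * request is issued when the transfer is in SW request (mem-to-mem) mode.
 */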
1142static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
1143{
1144	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1145	struct virt_dma_desc *vdesc;
1146	struct stm32_mdma_hwdesc *hwdesc;
1147	u32 id = chan->id;
1148	u32 status, reg;
1149
1150	vdesc = vchan_next_desc(&chan->vchan);
1151	if (!vdesc) {
1152		chan->desc = NULL;
1153		return;
1154	}
1155
1156	list_del(&vdesc->node);
1157
1158	chan->desc = to_stm32_mdma_desc(vdesc);
1159	hwdesc = chan->desc->node[0].hwdesc;
1160	chan->curr_hwdesc = 0;
1161
1162	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
1163	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
1164	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
1165	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
1166	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
1167	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
1168	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
1169	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
1170	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
1171	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);
1172
1173	/* Clear interrupt status if it is there */
1174	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1175	if (status)
1176		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);
1177
1178	stm32_mdma_dump_reg(chan);
1179
1180	/* Start DMA */
1181	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);
1182
1183	/* Set SW request in case of MEM2MEM transfer */
1184	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
1185		reg = STM32_MDMA_CCR(id);
1186		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1187	}
1188
1189	chan->busy = true;
1190
1191	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
1192}
1193
1194static void stm32_mdma_issue_pending(struct dma_chan *c)
1195{
1196	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1197	unsigned long flags;
1198
1199	spin_lock_irqsave(&chan->vchan.lock, flags);
1200
1201	if (!vchan_issue_pending(&chan->vchan))
1202		goto end;
1203
1204	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);
1205
1206	if (!chan->desc && !chan->busy)
1207		stm32_mdma_start_transfer(chan);
1208
1209end:
1210	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1211}
1212
1213static int stm32_mdma_pause(struct dma_chan *c)
1214{
1215	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1216	unsigned long flags;
1217	int ret;
1218
1219	spin_lock_irqsave(&chan->vchan.lock, flags);
1220	ret = stm32_mdma_disable_chan(chan);
1221	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1222
1223	if (!ret)
1224		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);
1225
1226	return ret;
1227}
1228
1229static int stm32_mdma_resume(struct dma_chan *c)
1230{
1231	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1232	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1233	struct stm32_mdma_hwdesc *hwdesc;
1234	unsigned long flags;
1235	u32 status, reg;
1236
	/* Resume only a paused transfer: a descriptor is set up and the channel is disabled */
1238	if (!chan->desc || (stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & STM32_MDMA_CCR_EN))
1239		return -EPERM;
1240
1241	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;
1242
1243	spin_lock_irqsave(&chan->vchan.lock, flags);
1244
1245	/* Re-configure control register */
1246	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);
1247
1248	/* Clear interrupt status if it is there */
1249	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1250	if (status)
1251		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
1252
1253	stm32_mdma_dump_reg(chan);
1254
1255	/* Re-start DMA */
1256	reg = STM32_MDMA_CCR(chan->id);
1257	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);
1258
1259	/* Set SW request in case of MEM2MEM transfer */
1260	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
1261		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
1262
1263	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1264
1265	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);
1266
1267	return 0;
1268}
1269
1270static int stm32_mdma_terminate_all(struct dma_chan *c)
1271{
1272	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1273	unsigned long flags;
1274	LIST_HEAD(head);
1275
1276	spin_lock_irqsave(&chan->vchan.lock, flags);
1277	if (chan->desc) {
1278		vchan_terminate_vdesc(&chan->desc->vdesc);
1279		if (chan->busy)
1280			stm32_mdma_stop(chan);
1281		chan->desc = NULL;
1282	}
1283	vchan_get_all_descriptors(&chan->vchan, &head);
1284	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1285
1286	vchan_dma_desc_free_list(&chan->vchan, &head);
1287
1288	return 0;
1289}
1290
1291static void stm32_mdma_synchronize(struct dma_chan *c)
1292{
1293	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1294
1295	vchan_synchronize(&chan->vchan);
1296}
1297
1298static int stm32_mdma_slave_config(struct dma_chan *c,
1299				   struct dma_slave_config *config)
1300{
1301	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1302
1303	memcpy(&chan->dma_config, config, sizeof(*config));
1304
1305	/* Check if user is requesting STM32 DMA to trigger MDMA */
1306	if (config->peripheral_size) {
1307		struct stm32_mdma_dma_config *mdma_config;
1308
1309		mdma_config = (struct stm32_mdma_dma_config *)chan->dma_config.peripheral_config;
1310		chan->chan_config.request = mdma_config->request;
1311		chan->chan_config.mask_addr = mdma_config->cmar;
1312		chan->chan_config.mask_data = mdma_config->cmdr;
1313		chan->chan_config.m2m_hw = true;
1314	}
1315
1316	return 0;
1317}
1318
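/*
 * Residue is the sum of the block lengths of the hardware descriptors not
 * yet processed (found by comparing CLAR against each node link address),
 * plus what remains in CBNDTR for the current block, rounded up to the
 * memory burst size.
 */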
1319static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
1320				      struct stm32_mdma_desc *desc,
1321				      u32 curr_hwdesc,
1322				      struct dma_tx_state *state)
1323{
1324	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1325	struct stm32_mdma_hwdesc *hwdesc;
1326	u32 cisr, clar, cbndtr, residue, modulo, burst_size;
1327	int i;
1328
1329	cisr = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1330
1331	residue = 0;
1332	/* Get the next hw descriptor to process from current transfer */
1333	clar = stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id));
1334	for (i = desc->count - 1; i >= 0; i--) {
1335		hwdesc = desc->node[i].hwdesc;
1336
1337		if (hwdesc->clar == clar)
1338			break;/* Current transfer found, stop cumulating */
1339
1340		/* Cumulate residue of unprocessed hw descriptors */
1341		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
1342	}
1343	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
1344	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
1345
1346	state->in_flight_bytes = 0;
1347	if (chan->chan_config.m2m_hw && (cisr & STM32_MDMA_CISR_CRQA))
1348		state->in_flight_bytes = cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
1349
1350	if (!chan->mem_burst)
1351		return residue;
1352
1353	burst_size = chan->mem_burst * chan->mem_width;
1354	modulo = residue % burst_size;
1355	if (modulo)
1356		residue = residue - modulo + burst_size;
1357
1358	return residue;
1359}
1360
1361static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
1362					    dma_cookie_t cookie,
1363					    struct dma_tx_state *state)
1364{
1365	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1366	struct virt_dma_desc *vdesc;
1367	enum dma_status status;
1368	unsigned long flags;
1369	u32 residue = 0;
1370
1371	status = dma_cookie_status(c, cookie, state);
1372	if ((status == DMA_COMPLETE) || (!state))
1373		return status;
1374
1375	spin_lock_irqsave(&chan->vchan.lock, flags);
1376
1377	vdesc = vchan_find_desc(&chan->vchan, cookie);
1378	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
1379		residue = stm32_mdma_desc_residue(chan, chan->desc, chan->curr_hwdesc, state);
1380	else if (vdesc)
1381		residue = stm32_mdma_desc_residue(chan, to_stm32_mdma_desc(vdesc), 0, state);
1382
1383	dma_set_residue(state, residue);
1384
1385	spin_unlock_irqrestore(&chan->vchan.lock, flags);
1386
1387	return status;
1388}
1389
1390static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
1391{
1392	vchan_cookie_complete(&chan->desc->vdesc);
1393	chan->desc = NULL;
1394	chan->busy = false;
1395
	/* Start the next transfer if a descriptor is pending */
1397	stm32_mdma_start_transfer(chan);
1398}
1399
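/*
 * All channels share a single interrupt line: GISR0 tells which channel
 * raised it, then the channel CISR flags are handled and cleared.
 */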
1400static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
1401{
1402	struct stm32_mdma_device *dmadev = devid;
1403	struct stm32_mdma_chan *chan;
1404	u32 reg, id, ccr, ien, status;
1405
	/* Find out which channel generated the interrupt */
1407	status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1408	if (!status) {
1409		dev_dbg(mdma2dev(dmadev), "spurious it\n");
1410		return IRQ_NONE;
1411	}
1412	id = __ffs(status);
1413	chan = &dmadev->chan[id];
1414
1415	/* Handle interrupt for the channel */
1416	spin_lock(&chan->vchan.lock);
1417	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1418	/* Mask Channel ReQuest Active bit which can be set in case of MEM2MEM */
1419	status &= ~STM32_MDMA_CISR_CRQA;
1420	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
1421	ien = (ccr & STM32_MDMA_CCR_IRQ_MASK) >> 1;
1422
1423	if (!(status & ien)) {
1424		spin_unlock(&chan->vchan.lock);
1425		if (chan->busy)
1426			dev_warn(chan2dev(chan),
1427				 "spurious it (status=0x%04x, ien=0x%04x)\n", status, ien);
1428		else
1429			dev_dbg(chan2dev(chan),
1430				"spurious it (status=0x%04x, ien=0x%04x)\n", status, ien);
1431		return IRQ_NONE;
1432	}
1433
1434	reg = STM32_MDMA_CIFCR(id);
1435
1436	if (status & STM32_MDMA_CISR_TEIF) {
1437		dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n",
1438			readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
1439		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1440		status &= ~STM32_MDMA_CISR_TEIF;
1441	}
1442
1443	if (status & STM32_MDMA_CISR_CTCIF) {
1444		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1445		status &= ~STM32_MDMA_CISR_CTCIF;
1446		stm32_mdma_xfer_end(chan);
1447	}
1448
1449	if (status & STM32_MDMA_CISR_BRTIF) {
1450		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1451		status &= ~STM32_MDMA_CISR_BRTIF;
1452	}
1453
1454	if (status & STM32_MDMA_CISR_BTIF) {
1455		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
1456		status &= ~STM32_MDMA_CISR_BTIF;
1457		chan->curr_hwdesc++;
1458		if (chan->desc && chan->desc->cyclic) {
1459			if (chan->curr_hwdesc == chan->desc->count)
1460				chan->curr_hwdesc = 0;
1461			vchan_cyclic_callback(&chan->desc->vdesc);
1462		}
1463	}
1464
1465	if (status & STM32_MDMA_CISR_TCIF) {
1466		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1467		status &= ~STM32_MDMA_CISR_TCIF;
1468	}
1469
1470	if (status) {
1471		stm32_mdma_set_bits(dmadev, reg, status);
1472		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
1473		if (!(ccr & STM32_MDMA_CCR_EN))
1474			dev_err(chan2dev(chan), "chan disabled by HW\n");
1475	}
1476
1477	spin_unlock(&chan->vchan.lock);
1478
1479	return IRQ_HANDLED;
1480}
1481
1482static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
1483{
1484	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1485	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1486	int ret;
1487
1488	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
1489					   c->device->dev,
1490					   sizeof(struct stm32_mdma_hwdesc),
1491					  __alignof__(struct stm32_mdma_hwdesc),
1492					   0);
1493	if (!chan->desc_pool) {
1494		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
1495		return -ENOMEM;
1496	}
1497
1498	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
1499	if (ret < 0)
1500		return ret;
1501
1502	ret = stm32_mdma_disable_chan(chan);
1503	if (ret < 0)
1504		pm_runtime_put(dmadev->ddev.dev);
1505
1506	return ret;
1507}
1508
1509static void stm32_mdma_free_chan_resources(struct dma_chan *c)
1510{
1511	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1512	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1513	unsigned long flags;
1514
1515	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);
1516
1517	if (chan->busy) {
1518		spin_lock_irqsave(&chan->vchan.lock, flags);
1519		stm32_mdma_stop(chan);
1520		chan->desc = NULL;
1521		spin_unlock_irqrestore(&chan->vchan.lock, flags);
1522	}
1523
1524	pm_runtime_put(dmadev->ddev.dev);
1525	vchan_free_chan_resources(to_virt_chan(c));
1526	dmam_pool_destroy(chan->desc_pool);
1527	chan->desc_pool = NULL;
1528}
1529
1530static bool stm32_mdma_filter_fn(struct dma_chan *c, void *fn_param)
1531{
1532	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1533	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1534
1535	/* Check if chan is marked Secure */
1536	if (dmadev->chan_reserved & BIT(chan->id))
1537		return false;
1538
1539	return true;
1540}
1541
1542static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
1543					    struct of_dma *ofdma)
1544{
1545	struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1546	dma_cap_mask_t mask = dmadev->ddev.cap_mask;
1547	struct stm32_mdma_chan *chan;
1548	struct dma_chan *c;
1549	struct stm32_mdma_chan_config config;
1550
1551	if (dma_spec->args_count < 5) {
1552		dev_err(mdma2dev(dmadev), "Bad number of args\n");
1553		return NULL;
1554	}
1555
1556	memset(&config, 0, sizeof(config));
1557	config.request = dma_spec->args[0];
1558	config.priority_level = dma_spec->args[1];
1559	config.transfer_config = dma_spec->args[2];
1560	config.mask_addr = dma_spec->args[3];
1561	config.mask_data = dma_spec->args[4];
1562
1563	if (config.request >= dmadev->nr_requests) {
1564		dev_err(mdma2dev(dmadev), "Bad request line\n");
1565		return NULL;
1566	}
1567
1568	if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
1569		dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1570		return NULL;
1571	}
1572
1573	c = __dma_request_channel(&mask, stm32_mdma_filter_fn, &config, ofdma->of_node);
1574	if (!c) {
1575		dev_err(mdma2dev(dmadev), "No more channels available\n");
1576		return NULL;
1577	}
1578
1579	chan = to_stm32_mdma_chan(c);
1580	chan->chan_config = config;
1581
1582	return c;
1583}
1584
1585static const struct of_device_id stm32_mdma_of_match[] = {
1586	{ .compatible = "st,stm32h7-mdma", },
1587	{ /* sentinel */ },
1588};
1589MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
1590
1591static int stm32_mdma_probe(struct platform_device *pdev)
1592{
1593	struct stm32_mdma_chan *chan;
1594	struct stm32_mdma_device *dmadev;
1595	struct dma_device *dd;
1596	struct device_node *of_node;
1597	struct reset_control *rst;
1598	u32 nr_channels, nr_requests;
1599	int i, count, ret;
1600
1601	of_node = pdev->dev.of_node;
1602	if (!of_node)
1603		return -ENODEV;
1604
1605	ret = device_property_read_u32(&pdev->dev, "dma-channels",
1606				       &nr_channels);
1607	if (ret) {
1608		nr_channels = STM32_MDMA_MAX_CHANNELS;
		dev_warn(&pdev->dev, "MDMA defaulting to %i channels\n",
1610			 nr_channels);
1611	}
1612
1613	ret = device_property_read_u32(&pdev->dev, "dma-requests",
1614				       &nr_requests);
1615	if (ret) {
1616		nr_requests = STM32_MDMA_MAX_REQUESTS;
		dev_warn(&pdev->dev, "MDMA defaulting to %i request lines\n",
1618			 nr_requests);
1619	}
1620
1621	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
1622	if (count < 0)
1623		count = 0;
1624
1625	dmadev = devm_kzalloc(&pdev->dev,
1626			      struct_size(dmadev, ahb_addr_masks, count),
1627			      GFP_KERNEL);
1628	if (!dmadev)
1629		return -ENOMEM;
1630
1631	dmadev->nr_channels = nr_channels;
1632	dmadev->nr_requests = nr_requests;
1633	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1634				       dmadev->ahb_addr_masks,
1635				       count);
1636	dmadev->nr_ahb_addr_masks = count;
1637
1638	dmadev->base = devm_platform_ioremap_resource(pdev, 0);
1639	if (IS_ERR(dmadev->base))
1640		return PTR_ERR(dmadev->base);
1641
1642	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
1643	if (IS_ERR(dmadev->clk))
1644		return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
1645				     "Missing clock controller\n");
1646
1647	ret = clk_prepare_enable(dmadev->clk);
1648	if (ret < 0) {
1649		dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
1650		return ret;
1651	}
1652
1653	rst = devm_reset_control_get(&pdev->dev, NULL);
1654	if (IS_ERR(rst)) {
1655		ret = PTR_ERR(rst);
1656		if (ret == -EPROBE_DEFER)
1657			goto err_clk;
1658	} else {
1659		reset_control_assert(rst);
1660		udelay(2);
1661		reset_control_deassert(rst);
1662	}
1663
1664	dd = &dmadev->ddev;
1665	dma_cap_set(DMA_SLAVE, dd->cap_mask);
1666	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
1667	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
1668	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
1669	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
1670	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
1671	dd->device_tx_status = stm32_mdma_tx_status;
1672	dd->device_issue_pending = stm32_mdma_issue_pending;
1673	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
1674	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
1675	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
1676	dd->device_config = stm32_mdma_slave_config;
1677	dd->device_pause = stm32_mdma_pause;
1678	dd->device_resume = stm32_mdma_resume;
1679	dd->device_terminate_all = stm32_mdma_terminate_all;
1680	dd->device_synchronize = stm32_mdma_synchronize;
1681	dd->descriptor_reuse = true;
1682
1683	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1684		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1685		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1686		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1687	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1688		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1689		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1690		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1691	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
1692		BIT(DMA_MEM_TO_MEM);
1693	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1694	dd->max_burst = STM32_MDMA_MAX_BURST;
1695	dd->dev = &pdev->dev;
1696	INIT_LIST_HEAD(&dd->channels);
1697
1698	for (i = 0; i < dmadev->nr_channels; i++) {
1699		chan = &dmadev->chan[i];
1700		chan->id = i;
1701
1702		if (stm32_mdma_read(dmadev, STM32_MDMA_CCR(i)) & STM32_MDMA_CCR_SM)
1703			dmadev->chan_reserved |= BIT(i);
1704
1705		chan->vchan.desc_free = stm32_mdma_desc_free;
1706		vchan_init(&chan->vchan, dd);
1707	}
1708
1709	dmadev->irq = platform_get_irq(pdev, 0);
1710	if (dmadev->irq < 0) {
1711		ret = dmadev->irq;
1712		goto err_clk;
1713	}
1714
1715	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
1716			       0, dev_name(&pdev->dev), dmadev);
1717	if (ret) {
1718		dev_err(&pdev->dev, "failed to request IRQ\n");
1719		goto err_clk;
1720	}
1721
1722	ret = dmaenginem_async_device_register(dd);
1723	if (ret)
1724		goto err_clk;
1725
1726	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
1727	if (ret < 0) {
1728		dev_err(&pdev->dev,
1729			"STM32 MDMA DMA OF registration failed %d\n", ret);
1730		goto err_clk;
1731	}
1732
1733	platform_set_drvdata(pdev, dmadev);
1734	pm_runtime_set_active(&pdev->dev);
1735	pm_runtime_enable(&pdev->dev);
1736	pm_runtime_get_noresume(&pdev->dev);
1737	pm_runtime_put(&pdev->dev);
1738
1739	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");
1740
1741	return 0;
1742
1743err_clk:
1744	clk_disable_unprepare(dmadev->clk);
1745
1746	return ret;
1747}
1748
1749#ifdef CONFIG_PM
1750static int stm32_mdma_runtime_suspend(struct device *dev)
1751{
1752	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1753
1754	clk_disable_unprepare(dmadev->clk);
1755
1756	return 0;
1757}
1758
1759static int stm32_mdma_runtime_resume(struct device *dev)
1760{
1761	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1762	int ret;
1763
1764	ret = clk_prepare_enable(dmadev->clk);
1765	if (ret) {
1766		dev_err(dev, "failed to prepare_enable clock\n");
1767		return ret;
1768	}
1769
1770	return 0;
1771}
1772#endif
1773
1774#ifdef CONFIG_PM_SLEEP
1775static int stm32_mdma_pm_suspend(struct device *dev)
1776{
1777	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1778	u32 ccr, id;
1779	int ret;
1780
1781	ret = pm_runtime_resume_and_get(dev);
1782	if (ret < 0)
1783		return ret;
1784
1785	for (id = 0; id < dmadev->nr_channels; id++) {
1786		ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
1787		if (ccr & STM32_MDMA_CCR_EN) {
1788			dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
1789			return -EBUSY;
1790		}
1791	}
1792
1793	pm_runtime_put_sync(dev);
1794
1795	pm_runtime_force_suspend(dev);
1796
1797	return 0;
1798}
1799
1800static int stm32_mdma_pm_resume(struct device *dev)
1801{
1802	return pm_runtime_force_resume(dev);
1803}
1804#endif
1805
1806static const struct dev_pm_ops stm32_mdma_pm_ops = {
1807	SET_SYSTEM_SLEEP_PM_OPS(stm32_mdma_pm_suspend, stm32_mdma_pm_resume)
1808	SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
1809			   stm32_mdma_runtime_resume, NULL)
1810};
1811
1812static struct platform_driver stm32_mdma_driver = {
1813	.probe = stm32_mdma_probe,
1814	.driver = {
1815		.name = "stm32-mdma",
1816		.of_match_table = stm32_mdma_of_match,
1817		.pm = &stm32_mdma_pm_ops,
1818	},
1819};
1820
1821static int __init stm32_mdma_init(void)
1822{
1823	return platform_driver_register(&stm32_mdma_driver);
1824}
1825
1826subsys_initcall(stm32_mdma_init);
1827
1828MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
1829MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
1830MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
1831