// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2019 Spreadtrum Communications Inc.

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/dma/sprd-dma.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_reserved_mem.h>
#include <linux/platform_device.h>
#include <sound/pcm.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>

#include "sprd-pcm-dma.h"

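/*
 * The per-channel link-list buffers allocated in sprd_pcm_open() are sized
 * for one link-list node of SPRD_PCM_DMA_LINKLIST_SIZE bytes per period,
 * and period/buffer sizes are constrained to multiples of the DMA burst
 * length below.
 */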
#define SPRD_PCM_DMA_LINKLIST_SIZE	64
#define SPRD_PCM_DMA_BRUST_LEN		640

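/*
 * Per-DMA-channel state: the slave channel, the in-flight descriptor and its
 * cookie, the coherent link-list buffer, and the pointer value seen on the
 * previous sprd_pcm_pointer() call.
 */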
struct sprd_pcm_dma_data {
	struct dma_chan *chan;
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	dma_addr_t phys;
	void *virt;
	int pre_pointer;
};

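/*
 * Per-substream runtime data: the owning substream, the DAI DMA parameters,
 * the per-channel DMA state, the number of hardware DMA channels and the
 * per-channel offset into the runtime buffer.
 */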
struct sprd_pcm_dma_private {
	struct snd_pcm_substream *substream;
	struct sprd_pcm_dma_params *params;
	struct sprd_pcm_dma_data data[SPRD_PCM_CHANNEL_MAX];
	int hw_chan;
	int dma_addr_offset;
};

static const struct snd_pcm_hardware sprd_pcm_hardware = {
	.info = SNDRV_PCM_INFO_MMAP | SNDRV_PCM_INFO_MMAP_VALID |
		SNDRV_PCM_INFO_INTERLEAVED | SNDRV_PCM_INFO_PAUSE |
		SNDRV_PCM_INFO_RESUME | SNDRV_PCM_INFO_NO_PERIOD_WAKEUP,
	.formats = SNDRV_PCM_FMTBIT_S16_LE | SNDRV_PCM_FMTBIT_S24_LE,
	.period_bytes_min = 1,
	.period_bytes_max = 64 * 1024,
	.periods_min = 1,
	.periods_max = PAGE_SIZE / SPRD_PCM_DMA_LINKLIST_SIZE,
	.buffer_bytes_max = 64 * 1024,
};

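/*
 * Apply the hardware constraints and allocate one coherent link-list buffer
 * per DMA channel; the buffers are handed to the DMA engine as link-list
 * memory in sprd_pcm_hw_params().
 */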
static int sprd_pcm_open(struct snd_soc_component *component,
			 struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct device *dev = component->dev;
	struct sprd_pcm_dma_private *dma_private;
	int hw_chan = SPRD_PCM_CHANNEL_MAX;
	int size, ret, i;

	snd_soc_set_runtime_hwparams(substream, &sprd_pcm_hardware);

	ret = snd_pcm_hw_constraint_step(runtime, 0,
					 SNDRV_PCM_HW_PARAM_PERIOD_BYTES,
					 SPRD_PCM_DMA_BRUST_LEN);
	if (ret < 0)
		return ret;

	ret = snd_pcm_hw_constraint_step(runtime, 0,
					 SNDRV_PCM_HW_PARAM_BUFFER_BYTES,
					 SPRD_PCM_DMA_BRUST_LEN);
	if (ret < 0)
		return ret;

	ret = snd_pcm_hw_constraint_integer(runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0)
		return ret;

	dma_private = devm_kzalloc(dev, sizeof(*dma_private), GFP_KERNEL);
	if (!dma_private)
		return -ENOMEM;

	size = runtime->hw.periods_max * SPRD_PCM_DMA_LINKLIST_SIZE;

	for (i = 0; i < hw_chan; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		data->virt = dmam_alloc_coherent(dev, size, &data->phys,
						 GFP_KERNEL);
		if (!data->virt) {
			ret = -ENOMEM;
			goto error;
		}
	}

	dma_private->hw_chan = hw_chan;
	runtime->private_data = dma_private;
	dma_private->substream = substream;

	return 0;

error:
	for (i = 0; i < hw_chan; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		if (data->virt)
			dmam_free_coherent(dev, size, data->virt, data->phys);
	}

	devm_kfree(dev, dma_private);
	return ret;
}

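/* Release the per-channel link-list buffers and the private runtime data. */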
static int sprd_pcm_close(struct snd_soc_component *component,
			  struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct sprd_pcm_dma_private *dma_private = runtime->private_data;
	struct device *dev = component->dev;
	int size = runtime->hw.periods_max * SPRD_PCM_DMA_LINKLIST_SIZE;
	int i;

	for (i = 0; i < dma_private->hw_chan; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		dmam_free_coherent(dev, size, data->virt, data->phys);
	}

	devm_kfree(dev, dma_private);

	return 0;
}

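/* DMA transfer-completion callback: notify ALSA that a period has elapsed. */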
static void sprd_pcm_dma_complete(void *data)
{
	struct sprd_pcm_dma_private *dma_private = data;
	struct snd_pcm_substream *substream = dma_private->substream;

	snd_pcm_period_elapsed(substream);
}

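/* Give back any DMA channels requested by sprd_pcm_request_dma_channel(). */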
static void sprd_pcm_release_dma_channel(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct sprd_pcm_dma_private *dma_private = runtime->private_data;
	int i;

	for (i = 0; i < SPRD_PCM_CHANNEL_MAX; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		if (data->chan) {
			dma_release_channel(data->chan);
			data->chan = NULL;
		}
	}
}

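/*
 * Request one slave DMA channel per audio channel, using the channel names
 * provided by the DAI's DMA parameters.
 */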
static int sprd_pcm_request_dma_channel(struct snd_soc_component *component,
					struct snd_pcm_substream *substream,
					int channels)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct sprd_pcm_dma_private *dma_private = runtime->private_data;
	struct device *dev = component->dev;
	struct sprd_pcm_dma_params *dma_params = dma_private->params;
	int i;

	if (channels > SPRD_PCM_CHANNEL_MAX) {
		dev_err(dev, "invalid dma channel number:%d\n", channels);
		return -EINVAL;
	}

	for (i = 0; i < channels; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		data->chan = dma_request_slave_channel(dev,
						       dma_params->chan_name[i]);
		if (!data->chan) {
			dev_err(dev, "failed to request dma channel:%s\n",
				dma_params->chan_name[i]);
			sprd_pcm_release_dma_channel(substream);
			return -ENODEV;
		}
	}

	return 0;
}

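/*
 * Split the runtime buffer evenly between the DMA channels, build a
 * scatterlist with one entry per period for each channel, apply the slave
 * configuration and prepare a link-list descriptor per channel.
 */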
static int sprd_pcm_hw_params(struct snd_soc_component *component,
			      struct snd_pcm_substream *substream,
			      struct snd_pcm_hw_params *params)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct sprd_pcm_dma_private *dma_private = runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = asoc_substream_to_rtd(substream);
	struct sprd_pcm_dma_params *dma_params;
	size_t totsize = params_buffer_bytes(params);
	size_t period = params_period_bytes(params);
	int channels = params_channels(params);
	int is_playback = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct scatterlist *sg;
	unsigned long flags;
	int ret, i, j, sg_num;

	dma_params = snd_soc_dai_get_dma_data(asoc_rtd_to_cpu(rtd, 0), substream);
	if (!dma_params) {
		dev_warn(component->dev, "no dma parameters set\n");
		dma_private->params = NULL;
		snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);
		runtime->dma_bytes = totsize;
		return 0;
	}

	if (!dma_private->params) {
		dma_private->params = dma_params;
		ret = sprd_pcm_request_dma_channel(component,
						   substream, channels);
		if (ret)
			return ret;
	}

	snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);

	runtime->dma_bytes = totsize;
	sg_num = totsize / period;
	dma_private->dma_addr_offset = totsize / channels;

	sg = devm_kcalloc(component->dev, sg_num, sizeof(*sg), GFP_KERNEL);
	if (!sg) {
		ret = -ENOMEM;
		goto sg_err;
	}

	for (i = 0; i < channels; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];
		struct dma_chan *chan = data->chan;
		struct dma_slave_config config = { };
		struct sprd_dma_linklist link = { };
		enum dma_transfer_direction dir;
		struct scatterlist *sgt = sg;

		config.src_maxburst = dma_params->fragment_len[i];
		config.src_addr_width = dma_params->datawidth[i];
		config.dst_addr_width = dma_params->datawidth[i];
		if (is_playback) {
			config.src_addr = runtime->dma_addr +
				i * dma_private->dma_addr_offset;
			config.dst_addr = dma_params->dev_phys[i];
			dir = DMA_MEM_TO_DEV;
		} else {
			config.src_addr = dma_params->dev_phys[i];
			config.dst_addr = runtime->dma_addr +
				i * dma_private->dma_addr_offset;
			dir = DMA_DEV_TO_MEM;
		}

		sg_init_table(sgt, sg_num);
		for (j = 0; j < sg_num; j++, sgt++) {
			u32 sg_len = period / channels;

			sg_dma_len(sgt) = sg_len;
			sg_dma_address(sgt) = runtime->dma_addr +
				i * dma_private->dma_addr_offset + sg_len * j;
		}

		/*
		 * Configure the link-list address for the DMA engine link-list
		 * mode.
		 */
		link.virt_addr = (unsigned long)data->virt;
		link.phy_addr = data->phys;

		ret = dmaengine_slave_config(chan, &config);
		if (ret) {
			dev_err(component->dev,
				"failed to set slave configuration: %d\n", ret);
			goto config_err;
		}

		/*
		 * Configure the DMA request mode, interrupt mode, channel mode
		 * and channel trigger mode through the flags.
		 */
		flags = SPRD_DMA_FLAGS(SPRD_DMA_CHN_MODE_NONE, SPRD_DMA_NO_TRG,
				       SPRD_DMA_FRAG_REQ, SPRD_DMA_TRANS_INT);
		data->desc = chan->device->device_prep_slave_sg(chan, sg,
								sg_num, dir,
								flags, &link);
		if (!data->desc) {
			dev_err(component->dev, "failed to prepare slave sg\n");
			ret = -ENOMEM;
			goto config_err;
		}

		if (!runtime->no_period_wakeup) {
			data->desc->callback = sprd_pcm_dma_complete;
			data->desc->callback_param = dma_private;
		}
	}

	devm_kfree(component->dev, sg);

	return 0;

config_err:
	devm_kfree(component->dev, sg);
sg_err:
	sprd_pcm_release_dma_channel(substream);
	return ret;
}

static int sprd_pcm_hw_free(struct snd_soc_component *component,
			    struct snd_pcm_substream *substream)
{
	snd_pcm_set_runtime_buffer(substream, NULL);
	sprd_pcm_release_dma_channel(substream);

	return 0;
}

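/* Submit, resume, terminate or pause every in-use DMA channel. */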
static int sprd_pcm_trigger(struct snd_soc_component *component,
			    struct snd_pcm_substream *substream, int cmd)
{
	struct sprd_pcm_dma_private *dma_private =
		substream->runtime->private_data;
	int ret = 0, i;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		for (i = 0; i < dma_private->hw_chan; i++) {
			struct sprd_pcm_dma_data *data = &dma_private->data[i];

			if (!data->desc)
				continue;

			data->cookie = dmaengine_submit(data->desc);
			ret = dma_submit_error(data->cookie);
			if (ret) {
				dev_err(component->dev,
					"failed to submit dma request: %d\n",
					ret);
				return ret;
			}

			dma_async_issue_pending(data->chan);
		}

		break;
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		for (i = 0; i < dma_private->hw_chan; i++) {
			struct sprd_pcm_dma_data *data = &dma_private->data[i];

			if (data->chan)
				dmaengine_resume(data->chan);
		}

		break;
	case SNDRV_PCM_TRIGGER_STOP:
		for (i = 0; i < dma_private->hw_chan; i++) {
			struct sprd_pcm_dma_data *data = &dma_private->data[i];

			if (data->chan)
				dmaengine_terminate_async(data->chan);
		}

		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		for (i = 0; i < dma_private->hw_chan; i++) {
			struct sprd_pcm_dma_data *data = &dma_private->data[i];

			if (data->chan)
				dmaengine_pause(data->chan);
		}

		break;
	default:
		ret = -EINVAL;
	}

	return ret;
}

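/*
 * Work out the current position in the runtime buffer. Each DMA channel
 * reports its current transfer address; when two channels are used, each
 * covers half of the buffer, so the combined offset is twice the per-channel
 * offset, taking the larger of the two values when exactly one channel has
 * wrapped since the previous call and the smaller one otherwise.
 */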
static snd_pcm_uframes_t sprd_pcm_pointer(struct snd_soc_component *component,
					  struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct sprd_pcm_dma_private *dma_private = runtime->private_data;
	int pointer[SPRD_PCM_CHANNEL_MAX];
	int bytes_of_pointer = 0, sel_max = 0, i;
	snd_pcm_uframes_t x;
	struct dma_tx_state state;
	enum dma_status status;

	for (i = 0; i < dma_private->hw_chan; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		if (!data->chan)
			continue;

		status = dmaengine_tx_status(data->chan, data->cookie, &state);
		if (status == DMA_ERROR) {
			dev_err(component->dev,
				"failed to get dma channel %d status\n", i);
			return 0;
		}

		/*
		 * The DMA engine reports the current transfer address in the
		 * residue field, so convert it into an offset within this
		 * channel's part of the buffer.
		 */
		pointer[i] = state.residue - runtime->dma_addr -
			i * dma_private->dma_addr_offset;

		if (i == 0) {
			bytes_of_pointer = pointer[i];
			sel_max = pointer[i] < data->pre_pointer ? 1 : 0;
		} else {
			sel_max ^= pointer[i] < data->pre_pointer ? 1 : 0;

			if (sel_max)
				bytes_of_pointer =
					max(pointer[i], pointer[i - 1]) << 1;
			else
				bytes_of_pointer =
					min(pointer[i], pointer[i - 1]) << 1;
		}

		data->pre_pointer = pointer[i];
	}

	x = bytes_to_frames(runtime, bytes_of_pointer);
	if (x == runtime->buffer_size)
		x = 0;

	return x;
}

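/* Map the DMA buffer into user space as write-combined memory. */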
static int sprd_pcm_mmap(struct snd_soc_component *component,
			 struct snd_pcm_substream *substream,
			 struct vm_area_struct *vma)
{
	struct snd_pcm_runtime *runtime = substream->runtime;

	vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot);
	return remap_pfn_range(vma, vma->vm_start,
			       runtime->dma_addr >> PAGE_SHIFT,
			       vma->vm_end - vma->vm_start,
			       vma->vm_page_prot);
}

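/*
 * Preallocate the playback and capture DMA buffers when the PCM device is
 * created.
 */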
static int sprd_pcm_new(struct snd_soc_component *component,
			struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_pcm *pcm = rtd->pcm;
	struct snd_pcm_substream *substream;
	int ret;

	ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
	if (ret)
		return ret;

	substream = pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream;
	if (substream) {
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, card->dev,
					  sprd_pcm_hardware.buffer_bytes_max,
					  &substream->dma_buffer);
		if (ret) {
			dev_err(card->dev,
				"can't alloc playback dma buffer: %d\n", ret);
			return ret;
		}
	}

	substream = pcm->streams[SNDRV_PCM_STREAM_CAPTURE].substream;
	if (substream) {
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, card->dev,
					  sprd_pcm_hardware.buffer_bytes_max,
					  &substream->dma_buffer);
		if (ret) {
			dev_err(card->dev,
				"can't alloc capture dma buffer: %d\n", ret);
			/* Only free the playback buffer if it was allocated. */
			substream = pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream;
			if (substream)
				snd_dma_free_pages(&substream->dma_buffer);
			return ret;
		}
	}

	return 0;
}

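/* Free the DMA buffers allocated by sprd_pcm_new(). */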
static void sprd_pcm_free(struct snd_soc_component *component,
			  struct snd_pcm *pcm)
{
	struct snd_pcm_substream *substream;
	int i;

	for (i = 0; i < ARRAY_SIZE(pcm->streams); i++) {
		substream = pcm->streams[i].substream;
		if (substream) {
			snd_dma_free_pages(&substream->dma_buffer);
			substream->dma_buffer.area = NULL;
			substream->dma_buffer.addr = 0;
		}
	}
}

static const struct snd_soc_component_driver sprd_soc_component = {
	.name		= DRV_NAME,
	.open		= sprd_pcm_open,
	.close		= sprd_pcm_close,
	.hw_params	= sprd_pcm_hw_params,
	.hw_free	= sprd_pcm_hw_free,
	.trigger	= sprd_pcm_trigger,
	.pointer	= sprd_pcm_pointer,
	.mmap		= sprd_pcm_mmap,
	.pcm_construct	= sprd_pcm_new,
	.pcm_destruct	= sprd_pcm_free,
	.compress_ops	= &sprd_platform_compress_ops,
};

static int sprd_soc_platform_probe(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	int ret;

	ret = of_reserved_mem_device_init_by_idx(&pdev->dev, np, 0);
	if (ret)
		dev_warn(&pdev->dev,
			 "no reserved DMA memory for audio platform device\n");

	ret = devm_snd_soc_register_component(&pdev->dev, &sprd_soc_component,
					      NULL, 0);
	if (ret)
		dev_err(&pdev->dev, "could not register platform:%d\n", ret);

	return ret;
}

static const struct of_device_id sprd_pcm_of_match[] = {
	{ .compatible = "sprd,pcm-platform", },
	{ },
};
MODULE_DEVICE_TABLE(of, sprd_pcm_of_match);

static struct platform_driver sprd_pcm_driver = {
	.driver = {
		.name = "sprd-pcm-audio",
		.of_match_table = sprd_pcm_of_match,
	},

	.probe = sprd_soc_platform_probe,
};

module_platform_driver(sprd_pcm_driver);

MODULE_DESCRIPTION("Spreadtrum ASoC PCM DMA");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:sprd-audio");