1 // SPDX-License-Identifier: GPL-2.0
2 //
3 // Renesas RZ/G2L ASoC Serial Sound Interface (SSIF-2) Driver
4 //
5 // Copyright (C) 2021 Renesas Electronics Corp.
6 // Copyright (C) 2019 Chris Brandt.
7 //
8 
9 #include <linux/clk.h>
10 #include <linux/dmaengine.h>
11 #include <linux/io.h>
12 #include <linux/module.h>
13 #include <linux/pm_runtime.h>
14 #include <linux/reset.h>
15 #include <sound/soc.h>
16 
17 /* REGISTER OFFSET */
18 #define SSICR			0x000
19 #define SSISR			0x004
20 #define SSIFCR			0x010
21 #define SSIFSR			0x014
22 #define SSIFTDR			0x018
23 #define SSIFRDR			0x01c
24 #define SSIOFR			0x020
25 #define SSISCR			0x024
26 
27 /* SSI REGISTER BITS */
28 #define SSICR_DWL(x)		(((x) & 0x7) << 19)
29 #define SSICR_SWL(x)		(((x) & 0x7) << 16)
30 
31 #define SSICR_CKS		BIT(30)
32 #define SSICR_TUIEN		BIT(29)
33 #define SSICR_TOIEN		BIT(28)
34 #define SSICR_RUIEN		BIT(27)
35 #define SSICR_ROIEN		BIT(26)
36 #define SSICR_MST		BIT(14)
37 #define SSICR_BCKP		BIT(13)
38 #define SSICR_LRCKP		BIT(12)
39 #define SSICR_CKDV(x)		(((x) & 0xf) << 4)
40 #define SSICR_TEN		BIT(1)
41 #define SSICR_REN		BIT(0)
42 
43 #define SSISR_TUIRQ		BIT(29)
44 #define SSISR_TOIRQ		BIT(28)
45 #define SSISR_RUIRQ		BIT(27)
46 #define SSISR_ROIRQ		BIT(26)
47 #define SSISR_IIRQ		BIT(25)
48 
49 #define SSIFCR_AUCKE		BIT(31)
50 #define SSIFCR_SSIRST		BIT(16)
51 #define SSIFCR_TIE		BIT(3)
52 #define SSIFCR_RIE		BIT(2)
53 #define SSIFCR_TFRST		BIT(1)
54 #define SSIFCR_RFRST		BIT(0)
55 
56 #define SSIFSR_TDC_MASK		0x3f
57 #define SSIFSR_TDC_SHIFT	24
58 #define SSIFSR_RDC_MASK		0x3f
59 #define SSIFSR_RDC_SHIFT	8
60 
61 #define SSIFSR_TDE		BIT(16)
62 #define SSIFSR_RDF		BIT(0)
63 
64 #define SSIOFR_LRCONT		BIT(8)
65 
66 #define SSISCR_TDES(x)		(((x) & 0x1f) << 8)
67 #define SSISCR_RDFS(x)		(((x) & 0x1f) << 0)
68 
69 /* Pre allocated buffers sizes */
70 #define PREALLOC_BUFFER		(SZ_32K)
71 #define PREALLOC_BUFFER_MAX	(SZ_32K)
72 
73 #define SSI_RATES		SNDRV_PCM_RATE_8000_48000 /* 8 kHz - 48 kHz */
74 #define SSI_FMTS		SNDRV_PCM_FMTBIT_S16_LE
75 #define SSI_CHAN_MIN		2
76 #define SSI_CHAN_MAX		2
77 #define SSI_FIFO_DEPTH		32
78 
79 struct rz_ssi_priv;
80 
81 struct rz_ssi_stream {
82 	struct rz_ssi_priv *priv;
83 	struct snd_pcm_substream *substream;
84 	int fifo_sample_size;	/* sample capacity of SSI FIFO */
85 	int dma_buffer_pos;	/* frame position used for the next DMA descriptor */
86 	int period_counter;	/* for keeping track of periods transferred */
87 	int sample_width;
88 	int buffer_pos;		/* current frame position in the buffer */
89 	int running;		/* 0=stopped, 1=running */
90 
91 	int uerr_num;
92 	int oerr_num;
93 
94 	struct dma_chan *dma_ch;
95 
96 	int (*transfer)(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm);
97 };
98 
99 struct rz_ssi_priv {
100 	void __iomem *base;
101 	struct platform_device *pdev;
102 	struct reset_control *rstc;
103 	struct device *dev;
104 	struct clk *sfr_clk;
105 	struct clk *clk;
106 
107 	phys_addr_t phys;
108 	int irq_int;
109 	int irq_tx;
110 	int irq_rx;
111 	int irq_rt;
112 
113 	spinlock_t lock;
114 
115 	/*
116 	 * The SSI supports full-duplex transmission and reception.
117 	 * However, if an error occurs, channel reset (both transmission
118 	 * and reception reset) is required.
119 	 * So it is better to use it as half-duplex (playback and capture
120 	 * should be done on separate channels).
121 	 */
122 	struct rz_ssi_stream playback;
123 	struct rz_ssi_stream capture;
124 
125 	/* clock */
126 	unsigned long audio_mck;
127 	unsigned long audio_clk_1;
128 	unsigned long audio_clk_2;
129 
130 	bool lrckp_fsync_fall;	/* LR clock polarity (SSICR.LRCKP) */
131 	bool bckp_rise;	/* Bit clock polarity (SSICR.BCKP) */
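	/* When true, a single shared DMA channel ("rt") serves both playback and capture */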
132 	bool dma_rt;
133 };
134 
135 static void rz_ssi_dma_complete(void *data);
136 
137 static void rz_ssi_reg_writel(struct rz_ssi_priv *priv, uint reg, u32 data)
138 {
139 	writel(data, (priv->base + reg));
140 }
141 
142 static u32 rz_ssi_reg_readl(struct rz_ssi_priv *priv, uint reg)
143 {
144 	return readl(priv->base + reg);
145 }
146 
147 static void rz_ssi_reg_mask_setl(struct rz_ssi_priv *priv, uint reg,
148 				 u32 bclr, u32 bset)
149 {
150 	u32 val;
151 
152 	val = readl(priv->base + reg);
153 	val = (val & ~bclr) | bset;
154 	writel(val, (priv->base + reg));
155 }
156 
157 static inline struct snd_soc_dai *
158 rz_ssi_get_dai(struct snd_pcm_substream *substream)
159 {
160 	struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream);
161 
162 	return snd_soc_rtd_to_cpu(rtd, 0);
163 }
164 
165 static inline bool rz_ssi_stream_is_play(struct rz_ssi_priv *ssi,
166 					 struct snd_pcm_substream *substream)
167 {
168 	return substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
169 }
170 
171 static inline struct rz_ssi_stream *
172 rz_ssi_stream_get(struct rz_ssi_priv *ssi, struct snd_pcm_substream *substream)
173 {
174 	struct rz_ssi_stream *stream = &ssi->playback;
175 
176 	if (substream->stream != SNDRV_PCM_STREAM_PLAYBACK)
177 		stream = &ssi->capture;
178 
179 	return stream;
180 }
181 
182 static inline bool rz_ssi_is_dma_enabled(struct rz_ssi_priv *ssi)
183 {
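	/* DMA is usable when a tx channel exists and it is either the shared "rt" channel or paired with an rx channel */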
184 	return (ssi->playback.dma_ch && (ssi->dma_rt || ssi->capture.dma_ch));
185 }
186 
187 static void rz_ssi_set_substream(struct rz_ssi_stream *strm,
188 				 struct snd_pcm_substream *substream)
189 {
190 	struct rz_ssi_priv *ssi = strm->priv;
191 	unsigned long flags;
192 
193 	spin_lock_irqsave(&ssi->lock, flags);
194 	strm->substream = substream;
195 	spin_unlock_irqrestore(&ssi->lock, flags);
196 }
197 
198 static bool rz_ssi_stream_is_valid(struct rz_ssi_priv *ssi,
199 				   struct rz_ssi_stream *strm)
200 {
201 	unsigned long flags;
202 	bool ret;
203 
204 	spin_lock_irqsave(&ssi->lock, flags);
205 	ret = strm->substream && strm->substream->runtime;
206 	spin_unlock_irqrestore(&ssi->lock, flags);
207 
208 	return ret;
209 }
210 
211 static void rz_ssi_stream_init(struct rz_ssi_stream *strm,
212 			       struct snd_pcm_substream *substream)
213 {
214 	struct snd_pcm_runtime *runtime = substream->runtime;
215 
216 	rz_ssi_set_substream(strm, substream);
217 	strm->sample_width = samples_to_bytes(runtime, 1);
218 	strm->dma_buffer_pos = 0;
219 	strm->period_counter = 0;
220 	strm->buffer_pos = 0;
221 
222 	strm->oerr_num = 0;
223 	strm->uerr_num = 0;
224 	strm->running = 0;
225 
226 	/* fifo init */
227 	strm->fifo_sample_size = SSI_FIFO_DEPTH;
228 }
229 
230 static void rz_ssi_stream_quit(struct rz_ssi_priv *ssi,
231 			       struct rz_ssi_stream *strm)
232 {
233 	struct snd_soc_dai *dai = rz_ssi_get_dai(strm->substream);
234 
235 	rz_ssi_set_substream(strm, NULL);
236 
237 	if (strm->oerr_num > 0)
238 		dev_info(dai->dev, "overrun = %d\n", strm->oerr_num);
239 
240 	if (strm->uerr_num > 0)
241 		dev_info(dai->dev, "underrun = %d\n", strm->uerr_num);
242 }
243 
244 static int rz_ssi_clk_setup(struct rz_ssi_priv *ssi, unsigned int rate,
245 			    unsigned int channels)
246 {
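	/* SSICR.CKDV encodings and their AUDIO_MCK divider ratios; -1 marks reserved values */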
247 	static s16 ckdv[16] = { 1,  2,  4,  8, 16, 32, 64, 128,
248 			       6, 12, 24, 48, 96, -1, -1, -1 };
249 	unsigned int channel_bits = 32;	/* System Word Length */
250 	unsigned long bclk_rate = rate * channels * channel_bits;
251 	unsigned int div;
252 	unsigned int i;
253 	u32 ssicr = 0;
254 	u32 clk_ckdv;
255 
256 	/* Clear AUCKE so we can set MST */
257 	rz_ssi_reg_writel(ssi, SSIFCR, 0);
258 
259 	/* Continue to output LRCK pin even when idle */
260 	rz_ssi_reg_writel(ssi, SSIOFR, SSIOFR_LRCONT);
261 	if (ssi->audio_clk_1 && ssi->audio_clk_2) {
262 		if (ssi->audio_clk_1 % bclk_rate)
263 			ssi->audio_mck = ssi->audio_clk_2;
264 		else
265 			ssi->audio_mck = ssi->audio_clk_1;
266 	}
267 
268 	/* Clock setting */
269 	ssicr |= SSICR_MST;
270 	if (ssi->audio_mck == ssi->audio_clk_1)
271 		ssicr |= SSICR_CKS;
272 	if (ssi->bckp_rise)
273 		ssicr |= SSICR_BCKP;
274 	if (ssi->lrckp_fsync_fall)
275 		ssicr |= SSICR_LRCKP;
276 
277 	/* Determine the clock divider */
278 	clk_ckdv = 0;
279 	div = ssi->audio_mck / bclk_rate;
280 	/* try to find a match */
281 	for (i = 0; i < ARRAY_SIZE(ckdv); i++) {
282 		if (ckdv[i] == div) {
283 			clk_ckdv = i;
284 			break;
285 		}
286 	}
287 
288 	if (i == ARRAY_SIZE(ckdv)) {
289 		dev_err(ssi->dev, "Rate not divisible by audio clock source\n");
290 		return -EINVAL;
291 	}
292 
293 	/*
294 	 * DWL: Data Word Length = 16 bits
295 	 * SWL: System Word Length = 32 bits
296 	 */
297 	ssicr |= SSICR_CKDV(clk_ckdv);
298 	ssicr |= SSICR_DWL(1) | SSICR_SWL(3);
299 	rz_ssi_reg_writel(ssi, SSICR, ssicr);
300 	rz_ssi_reg_writel(ssi, SSIFCR,
301 			  (SSIFCR_AUCKE | SSIFCR_TFRST | SSIFCR_RFRST));
302 
303 	return 0;
304 }
305 
306 static int rz_ssi_start(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
307 {
308 	bool is_play = rz_ssi_stream_is_play(ssi, strm->substream);
309 	u32 ssicr, ssifcr;
310 
311 	ssicr = rz_ssi_reg_readl(ssi, SSICR);
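	/* Clear the interrupt-enable and FIFO-reset bits (SSIFCR[3:0]) so they can be set per direction below */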
312 	ssifcr = rz_ssi_reg_readl(ssi, SSIFCR) & ~0xF;
313 
314 	/* FIFO interrupt thresholds */
315 	if (rz_ssi_is_dma_enabled(ssi))
316 		rz_ssi_reg_writel(ssi, SSISCR, 0);
317 	else
318 		rz_ssi_reg_writel(ssi, SSISCR,
319 				  SSISCR_TDES(strm->fifo_sample_size / 2 - 1) |
320 				  SSISCR_RDFS(0));
321 
322 	/* enable IRQ */
323 	if (is_play) {
324 		ssicr |= SSICR_TUIEN | SSICR_TOIEN;
325 		ssifcr |= SSIFCR_TIE | SSIFCR_RFRST;
326 	} else {
327 		ssicr |= SSICR_RUIEN | SSICR_ROIEN;
328 		ssifcr |= SSIFCR_RIE | SSIFCR_TFRST;
329 	}
330 
331 	rz_ssi_reg_writel(ssi, SSICR, ssicr);
332 	rz_ssi_reg_writel(ssi, SSIFCR, ssifcr);
333 
334 	/* Clear all error flags */
335 	rz_ssi_reg_mask_setl(ssi, SSISR,
336 			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
337 			      SSISR_RUIRQ), 0);
338 
339 	strm->running = 1;
340 	ssicr |= is_play ? SSICR_TEN : SSICR_REN;
341 	rz_ssi_reg_writel(ssi, SSICR, ssicr);
342 
343 	return 0;
344 }
345 
346 static int rz_ssi_stop(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
347 {
348 	int timeout;
349 
350 	strm->running = 0;
351 
352 	/* Disable TX/RX */
353 	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0);
354 
355 	/* Cancel all remaining DMA transactions */
356 	if (rz_ssi_is_dma_enabled(ssi))
357 		dmaengine_terminate_async(strm->dma_ch);
358 
359 	/* Disable irqs */
360 	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TUIEN | SSICR_TOIEN |
361 			     SSICR_RUIEN | SSICR_ROIEN, 0);
362 	rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_TIE | SSIFCR_RIE, 0);
363 
364 	/* Clear all error flags */
365 	rz_ssi_reg_mask_setl(ssi, SSISR,
366 			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
367 			      SSISR_RUIRQ), 0);
368 
369 	/* Wait for idle */
370 	timeout = 100;
371 	while (--timeout) {
372 		if (rz_ssi_reg_readl(ssi, SSISR) & SSISR_IIRQ)
373 			break;
374 		udelay(1);
375 	}
376 
377 	if (!timeout)
378 		dev_info(ssi->dev, "timeout waiting for SSI idle\n");
379 
380 	/* Hold FIFOs in reset */
381 	rz_ssi_reg_mask_setl(ssi, SSIFCR, 0,
382 			     SSIFCR_TFRST | SSIFCR_RFRST);
383 
384 	return 0;
385 }
386 
387 static void rz_ssi_pointer_update(struct rz_ssi_stream *strm, int frames)
388 {
389 	struct snd_pcm_substream *substream = strm->substream;
390 	struct snd_pcm_runtime *runtime;
391 	int current_period;
392 
393 	if (!strm->running || !substream || !substream->runtime)
394 		return;
395 
396 	runtime = substream->runtime;
397 	strm->buffer_pos += frames;
398 	WARN_ON(strm->buffer_pos > runtime->buffer_size);
399 
400 	/* ring buffer */
401 	if (strm->buffer_pos == runtime->buffer_size)
402 		strm->buffer_pos = 0;
403 
404 	current_period = strm->buffer_pos / runtime->period_size;
405 	if (strm->period_counter != current_period) {
406 		snd_pcm_period_elapsed(strm->substream);
407 		strm->period_counter = current_period;
408 	}
409 }
410 
411 static int rz_ssi_pio_recv(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
412 {
413 	struct snd_pcm_substream *substream = strm->substream;
414 	struct snd_pcm_runtime *runtime;
415 	u16 *buf;
416 	int fifo_samples;
417 	int frames_left;
418 	int samples;
419 	int i;
420 
421 	if (!rz_ssi_stream_is_valid(ssi, strm))
422 		return -EINVAL;
423 
424 	runtime = substream->runtime;
425 
426 	do {
427 		/* frames left in this period */
428 		frames_left = runtime->period_size -
429 			      (strm->buffer_pos % runtime->period_size);
430 		if (!frames_left)
431 			frames_left = runtime->period_size;
432 
433 		/* Samples in RX FIFO */
434 		fifo_samples = (rz_ssi_reg_readl(ssi, SSIFSR) >>
435 				SSIFSR_RDC_SHIFT) & SSIFSR_RDC_MASK;
436 
437 		/* Only read full frames at a time */
438 		samples = 0;
439 		while (frames_left && (fifo_samples >= runtime->channels)) {
440 			samples += runtime->channels;
441 			fifo_samples -= runtime->channels;
442 			frames_left--;
443 		}
444 
445 		/* not enough samples yet */
446 		if (!samples)
447 			break;
448 
449 		/* calculate new buffer index */
450 		buf = (u16 *)runtime->dma_area;
451 		buf += strm->buffer_pos * runtime->channels;
452 
453 		/* Note, only supports 16-bit samples */
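		/* Received data occupies the upper 16 bits of the 32-bit FIFO word, hence the shift */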
454 		for (i = 0; i < samples; i++)
455 			*buf++ = (u16)(rz_ssi_reg_readl(ssi, SSIFRDR) >> 16);
456 
457 		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
458 		rz_ssi_pointer_update(strm, samples / runtime->channels);
459 	} while (!frames_left && fifo_samples >= runtime->channels);
460 
461 	return 0;
462 }
463 
464 static int rz_ssi_pio_send(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
465 {
466 	struct snd_pcm_substream *substream = strm->substream;
467 	struct snd_pcm_runtime *runtime = substream->runtime;
468 	int sample_space;
469 	int samples = 0;
470 	int frames_left;
471 	int i;
472 	u32 ssifsr;
473 	u16 *buf;
474 
475 	if (!rz_ssi_stream_is_valid(ssi, strm))
476 		return -EINVAL;
477 
478 	/* frames left in this period */
479 	frames_left = runtime->period_size - (strm->buffer_pos %
480 					      runtime->period_size);
481 	if (frames_left == 0)
482 		frames_left = runtime->period_size;
483 
484 	sample_space = strm->fifo_sample_size;
485 	ssifsr = rz_ssi_reg_readl(ssi, SSIFSR);
486 	sample_space -= (ssifsr >> SSIFSR_TDC_SHIFT) & SSIFSR_TDC_MASK;
487 
488 	/* Only add full frames at a time */
489 	while (frames_left && (sample_space >= runtime->channels)) {
490 		samples += runtime->channels;
491 		sample_space -= runtime->channels;
492 		frames_left--;
493 	}
494 
495 	/* no space to send anything right now */
496 	if (samples == 0)
497 		return 0;
498 
499 	/* calculate new buffer index */
500 	buf = (u16 *)(runtime->dma_area);
501 	buf += strm->buffer_pos * runtime->channels;
502 
503 	/* Note, only supports 16-bit samples */
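	/* Samples are written left-justified into the upper 16 bits of the 32-bit FIFO word */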
504 	for (i = 0; i < samples; i++)
505 		rz_ssi_reg_writel(ssi, SSIFTDR, ((u32)(*buf++) << 16));
506 
507 	rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_TDE, 0);
508 	rz_ssi_pointer_update(strm, samples / runtime->channels);
509 
510 	return 0;
511 }
512 
513 static irqreturn_t rz_ssi_interrupt(int irq, void *data)
514 {
515 	struct rz_ssi_stream *strm = NULL;
516 	struct rz_ssi_priv *ssi = data;
517 	u32 ssisr = rz_ssi_reg_readl(ssi, SSISR);
518 
519 	if (ssi->playback.substream)
520 		strm = &ssi->playback;
521 	else if (ssi->capture.substream)
522 		strm = &ssi->capture;
523 	else
524 		return IRQ_HANDLED; /* Left over TX/RX interrupt */
525 
526 	if (irq == ssi->irq_int) { /* error or idle */
527 		if (ssisr & SSISR_TUIRQ)
528 			strm->uerr_num++;
529 		if (ssisr & SSISR_TOIRQ)
530 			strm->oerr_num++;
531 		if (ssisr & SSISR_RUIRQ)
532 			strm->uerr_num++;
533 		if (ssisr & SSISR_ROIRQ)
534 			strm->oerr_num++;
535 
536 		if (ssisr & (SSISR_TUIRQ | SSISR_TOIRQ | SSISR_RUIRQ |
537 			     SSISR_ROIRQ)) {
538 			/* Error handling */
539 			/* You must reset (stop/restart) after each interrupt */
540 			rz_ssi_stop(ssi, strm);
541 
542 			/* Clear all flags */
543 			rz_ssi_reg_mask_setl(ssi, SSISR, SSISR_TOIRQ |
544 					     SSISR_TUIRQ | SSISR_ROIRQ |
545 					     SSISR_RUIRQ, 0);
546 
547 			/* Add/remove more data */
548 			strm->transfer(ssi, strm);
549 
550 			/* Resume */
551 			rz_ssi_start(ssi, strm);
552 		}
553 	}
554 
555 	if (!strm->running)
556 		return IRQ_HANDLED;
557 
558 	/* tx data empty */
559 	if (irq == ssi->irq_tx)
560 		strm->transfer(ssi, &ssi->playback);
561 
562 	/* rx data full */
563 	if (irq == ssi->irq_rx) {
564 		strm->transfer(ssi, &ssi->capture);
565 		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
566 	}
567 
568 	if (irq == ssi->irq_rt) {
569 		struct snd_pcm_substream *substream = strm->substream;
570 
571 		if (rz_ssi_stream_is_play(ssi, substream)) {
572 			strm->transfer(ssi, &ssi->playback);
573 		} else {
574 			strm->transfer(ssi, &ssi->capture);
575 			rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
576 		}
577 	}
578 
579 	return IRQ_HANDLED;
580 }
581 
582 static int rz_ssi_dma_slave_config(struct rz_ssi_priv *ssi,
583 				   struct dma_chan *dma_ch, bool is_play)
584 {
585 	struct dma_slave_config cfg;
586 
587 	memset(&cfg, 0, sizeof(cfg));
588 
589 	cfg.direction = is_play ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
590 	cfg.dst_addr = ssi->phys + SSIFTDR;
591 	cfg.src_addr = ssi->phys + SSIFRDR;
592 	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
593 	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
594 
595 	return dmaengine_slave_config(dma_ch, &cfg);
596 }
597 
598 static int rz_ssi_dma_transfer(struct rz_ssi_priv *ssi,
599 			       struct rz_ssi_stream *strm)
600 {
601 	struct snd_pcm_substream *substream = strm->substream;
602 	struct dma_async_tx_descriptor *desc;
603 	struct snd_pcm_runtime *runtime;
604 	enum dma_transfer_direction dir;
605 	u32 dma_paddr, dma_size;
606 	int amount;
607 
608 	if (!rz_ssi_stream_is_valid(ssi, strm))
609 		return -EINVAL;
610 
611 	runtime = substream->runtime;
612 	if (runtime->state == SNDRV_PCM_STATE_DRAINING)
613 		/*
614 		 * Stream is ending, so do not queue up any more DMA
615 		 * transfers otherwise we play partial sound clips
616 		 * because we can't shut off the DMA quick enough.
617 		 */
618 		return 0;
619 
620 	dir = rz_ssi_stream_is_play(ssi, substream) ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
621 
622 	/* Always transfer 1 period */
623 	amount = runtime->period_size;
624 
625 	/* DMA physical address and size */
626 	dma_paddr = runtime->dma_addr + frames_to_bytes(runtime,
627 							strm->dma_buffer_pos);
628 	dma_size = frames_to_bytes(runtime, amount);
629 	desc = dmaengine_prep_slave_single(strm->dma_ch, dma_paddr, dma_size,
630 					   dir,
631 					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
632 	if (!desc) {
633 		dev_err(ssi->dev, "dmaengine_prep_slave_single() fail\n");
634 		return -ENOMEM;
635 	}
636 
637 	desc->callback = rz_ssi_dma_complete;
638 	desc->callback_param = strm;
639 
640 	if (dmaengine_submit(desc) < 0) {
641 		dev_err(ssi->dev, "dmaengine_submit() fail\n");
642 		return -EIO;
643 	}
644 
645 	/* Update DMA pointer */
646 	strm->dma_buffer_pos += amount;
647 	if (strm->dma_buffer_pos >= runtime->buffer_size)
648 		strm->dma_buffer_pos = 0;
649 
650 	/* Start DMA */
651 	dma_async_issue_pending(strm->dma_ch);
652 
653 	return 0;
654 }
655 
656 static void rz_ssi_dma_complete(void *data)
657 {
658 	struct rz_ssi_stream *strm = (struct rz_ssi_stream *)data;
659 
660 	if (!strm->running || !strm->substream || !strm->substream->runtime)
661 		return;
662 
663 	/* Note that next DMA transaction has probably already started */
664 	rz_ssi_pointer_update(strm, strm->substream->runtime->period_size);
665 
666 	/* Queue up another DMA transaction */
667 	rz_ssi_dma_transfer(strm->priv, strm);
668 }
669 
670 static void rz_ssi_release_dma_channels(struct rz_ssi_priv *ssi)
671 {
672 	if (ssi->playback.dma_ch) {
673 		dma_release_channel(ssi->playback.dma_ch);
674 		ssi->playback.dma_ch = NULL;
675 		if (ssi->dma_rt)
676 			ssi->dma_rt = false;
677 	}
678 
679 	if (ssi->capture.dma_ch) {
680 		dma_release_channel(ssi->capture.dma_ch);
681 		ssi->capture.dma_ch = NULL;
682 	}
683 }
684 
685 static int rz_ssi_dma_request(struct rz_ssi_priv *ssi, struct device *dev)
686 {
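	/* Prefer dedicated "tx"/"rx" channels; fall back to a single shared "rt" channel */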
687 	ssi->playback.dma_ch = dma_request_chan(dev, "tx");
688 	if (IS_ERR(ssi->playback.dma_ch))
689 		ssi->playback.dma_ch = NULL;
690 
691 	ssi->capture.dma_ch = dma_request_chan(dev, "rx");
692 	if (IS_ERR(ssi->capture.dma_ch))
693 		ssi->capture.dma_ch = NULL;
694 
695 	if (!ssi->playback.dma_ch && !ssi->capture.dma_ch) {
696 		ssi->playback.dma_ch = dma_request_chan(dev, "rt");
697 		if (IS_ERR(ssi->playback.dma_ch)) {
698 			ssi->playback.dma_ch = NULL;
699 			goto no_dma;
700 		}
701 
702 		ssi->dma_rt = true;
703 	}
704 
705 	if (!rz_ssi_is_dma_enabled(ssi))
706 		goto no_dma;
707 
708 	if (ssi->playback.dma_ch &&
709 	    (rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch, true) < 0))
710 		goto no_dma;
711 
712 	if (ssi->capture.dma_ch &&
713 	    (rz_ssi_dma_slave_config(ssi, ssi->capture.dma_ch, false) < 0))
714 		goto no_dma;
715 
716 	return 0;
717 
718 no_dma:
719 	rz_ssi_release_dma_channels(ssi);
720 
721 	return -ENODEV;
722 }
723 
724 static int rz_ssi_dai_trigger(struct snd_pcm_substream *substream, int cmd,
725 			      struct snd_soc_dai *dai)
726 {
727 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
728 	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
729 	int ret = 0, i, num_transfer = 1;
730 
731 	switch (cmd) {
732 	case SNDRV_PCM_TRIGGER_START:
733 		/* Soft Reset */
734 		rz_ssi_reg_mask_setl(ssi, SSIFCR, 0, SSIFCR_SSIRST);
735 		rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_SSIRST, 0);
736 		udelay(5);
737 
738 		rz_ssi_stream_init(strm, substream);
739 
740 		if (ssi->dma_rt) {
741 			bool is_playback;
742 
743 			is_playback = rz_ssi_stream_is_play(ssi, substream);
744 			ret = rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch,
745 						      is_playback);
746 			/* Fallback to pio */
747 			if (ret < 0) {
748 				ssi->playback.transfer = rz_ssi_pio_send;
749 				ssi->capture.transfer = rz_ssi_pio_recv;
750 				rz_ssi_release_dma_channels(ssi);
751 			}
752 		}
753 
754 		/* For DMA, queue up multiple DMA descriptors */
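		/* Priming several period-sized descriptors keeps the engine busy while earlier ones complete */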
755 		if (rz_ssi_is_dma_enabled(ssi))
756 			num_transfer = 4;
757 
758 		for (i = 0; i < num_transfer; i++) {
759 			ret = strm->transfer(ssi, strm);
760 			if (ret)
761 				goto done;
762 		}
763 
764 		ret = rz_ssi_start(ssi, strm);
765 		break;
766 	case SNDRV_PCM_TRIGGER_STOP:
767 		rz_ssi_stop(ssi, strm);
768 		rz_ssi_stream_quit(ssi, strm);
769 		break;
770 	}
771 
772 done:
773 	return ret;
774 }
775 
776 static int rz_ssi_dai_set_fmt(struct snd_soc_dai *dai, unsigned int fmt)
777 {
778 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
779 
780 	switch (fmt & SND_SOC_DAIFMT_CLOCK_PROVIDER_MASK) {
781 	case SND_SOC_DAIFMT_BP_FP:
782 		break;
783 	default:
784 		dev_err(ssi->dev, "Codec should be clk and frame consumer\n");
785 		return -EINVAL;
786 	}
787 
788 	/*
789 	 * set clock polarity
790 	 *
791 	 * "normal" BCLK = Signal is available at rising edge of BCLK
792 	 * "normal" FSYNC = (I2S) Left ch starts with falling FSYNC edge
793 	 */
794 	switch (fmt & SND_SOC_DAIFMT_INV_MASK) {
795 	case SND_SOC_DAIFMT_NB_NF:
796 		ssi->bckp_rise = false;
797 		ssi->lrckp_fsync_fall = false;
798 		break;
799 	case SND_SOC_DAIFMT_NB_IF:
800 		ssi->bckp_rise = false;
801 		ssi->lrckp_fsync_fall = true;
802 		break;
803 	case SND_SOC_DAIFMT_IB_NF:
804 		ssi->bckp_rise = true;
805 		ssi->lrckp_fsync_fall = false;
806 		break;
807 	case SND_SOC_DAIFMT_IB_IF:
808 		ssi->bckp_rise = true;
809 		ssi->lrckp_fsync_fall = true;
810 		break;
811 	default:
812 		return -EINVAL;
813 	}
814 
815 	/* only i2s support */
816 	switch (fmt & SND_SOC_DAIFMT_FORMAT_MASK) {
817 	case SND_SOC_DAIFMT_I2S:
818 		break;
819 	default:
820 		dev_err(ssi->dev, "Only I2S mode is supported.\n");
821 		return -EINVAL;
822 	}
823 
824 	return 0;
825 }
826 
827 static int rz_ssi_dai_hw_params(struct snd_pcm_substream *substream,
828 				struct snd_pcm_hw_params *params,
829 				struct snd_soc_dai *dai)
830 {
831 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
832 	unsigned int sample_bits = hw_param_interval(params,
833 					SNDRV_PCM_HW_PARAM_SAMPLE_BITS)->min;
834 	unsigned int channels = params_channels(params);
835 
836 	if (sample_bits != 16) {
837 		dev_err(ssi->dev, "Unsupported sample width: %d\n",
838 			sample_bits);
839 		return -EINVAL;
840 	}
841 
842 	if (channels != 2) {
843 		dev_err(ssi->dev, "Number of channels not matched: %d\n",
844 			channels);
845 		return -EINVAL;
846 	}
847 
848 	return rz_ssi_clk_setup(ssi, params_rate(params),
849 				params_channels(params));
850 }
851 
852 static const struct snd_soc_dai_ops rz_ssi_dai_ops = {
853 	.trigger	= rz_ssi_dai_trigger,
854 	.set_fmt	= rz_ssi_dai_set_fmt,
855 	.hw_params	= rz_ssi_dai_hw_params,
856 };
857 
858 static const struct snd_pcm_hardware rz_ssi_pcm_hardware = {
859 	.info			= SNDRV_PCM_INFO_INTERLEAVED	|
860 				  SNDRV_PCM_INFO_MMAP		|
861 				  SNDRV_PCM_INFO_MMAP_VALID,
862 	.buffer_bytes_max	= PREALLOC_BUFFER,
863 	.period_bytes_min	= 32,
864 	.period_bytes_max	= 8192,
865 	.channels_min		= SSI_CHAN_MIN,
866 	.channels_max		= SSI_CHAN_MAX,
867 	.periods_min		= 1,
868 	.periods_max		= 32,
869 	.fifo_size		= 32 * 2,
870 };
871 
872 static int rz_ssi_pcm_open(struct snd_soc_component *component,
873 			   struct snd_pcm_substream *substream)
874 {
875 	snd_soc_set_runtime_hwparams(substream, &rz_ssi_pcm_hardware);
876 
877 	return snd_pcm_hw_constraint_integer(substream->runtime,
878 					    SNDRV_PCM_HW_PARAM_PERIODS);
879 }
880 
881 static snd_pcm_uframes_t rz_ssi_pcm_pointer(struct snd_soc_component *component,
882 					    struct snd_pcm_substream *substream)
883 {
884 	struct snd_soc_dai *dai = rz_ssi_get_dai(substream);
885 	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
886 	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
887 
888 	return strm->buffer_pos;
889 }
890 
891 static int rz_ssi_pcm_new(struct snd_soc_component *component,
892 			  struct snd_soc_pcm_runtime *rtd)
893 {
894 	snd_pcm_set_managed_buffer_all(rtd->pcm, SNDRV_DMA_TYPE_DEV,
895 				       rtd->card->snd_card->dev,
896 				       PREALLOC_BUFFER, PREALLOC_BUFFER_MAX);
897 	return 0;
898 }
899 
900 static struct snd_soc_dai_driver rz_ssi_soc_dai[] = {
901 	{
902 		.name			= "rz-ssi-dai",
903 		.playback = {
904 			.rates		= SSI_RATES,
905 			.formats	= SSI_FMTS,
906 			.channels_min	= SSI_CHAN_MIN,
907 			.channels_max	= SSI_CHAN_MAX,
908 		},
909 		.capture = {
910 			.rates		= SSI_RATES,
911 			.formats	= SSI_FMTS,
912 			.channels_min	= SSI_CHAN_MIN,
913 			.channels_max	= SSI_CHAN_MAX,
914 		},
915 		.ops = &rz_ssi_dai_ops,
916 	},
917 };
918 
919 static const struct snd_soc_component_driver rz_ssi_soc_component = {
920 	.name			= "rz-ssi",
921 	.open			= rz_ssi_pcm_open,
922 	.pointer		= rz_ssi_pcm_pointer,
923 	.pcm_construct		= rz_ssi_pcm_new,
924 	.legacy_dai_naming	= 1,
925 };
926 
927 static int rz_ssi_probe(struct platform_device *pdev)
928 {
929 	struct rz_ssi_priv *ssi;
930 	struct clk *audio_clk;
931 	struct resource *res;
932 	int ret;
933 
934 	ssi = devm_kzalloc(&pdev->dev, sizeof(*ssi), GFP_KERNEL);
935 	if (!ssi)
936 		return -ENOMEM;
937 
938 	ssi->pdev = pdev;
939 	ssi->dev = &pdev->dev;
940 	ssi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
941 	if (IS_ERR(ssi->base))
942 		return PTR_ERR(ssi->base);
943 
944 	ssi->phys = res->start;
945 	ssi->clk = devm_clk_get(&pdev->dev, "ssi");
946 	if (IS_ERR(ssi->clk))
947 		return PTR_ERR(ssi->clk);
948 
949 	ssi->sfr_clk = devm_clk_get(&pdev->dev, "ssi_sfr");
950 	if (IS_ERR(ssi->sfr_clk))
951 		return PTR_ERR(ssi->sfr_clk);
952 
953 	audio_clk = devm_clk_get(&pdev->dev, "audio_clk1");
954 	if (IS_ERR(audio_clk))
955 		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
956 				     "no audio clk1");
957 
958 	ssi->audio_clk_1 = clk_get_rate(audio_clk);
959 	audio_clk = devm_clk_get(&pdev->dev, "audio_clk2");
960 	if (IS_ERR(audio_clk))
961 		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
962 				     "no audio clk2");
963 
964 	ssi->audio_clk_2 = clk_get_rate(audio_clk);
965 	if (!(ssi->audio_clk_1 || ssi->audio_clk_2))
966 		return dev_err_probe(&pdev->dev, -EINVAL,
967 				     "no audio clk1 or audio clk2");
968 
969 	ssi->audio_mck = ssi->audio_clk_1 ? ssi->audio_clk_1 : ssi->audio_clk_2;
970 
971 	/* Detect DMA support */
972 	ret = rz_ssi_dma_request(ssi, &pdev->dev);
973 	if (ret < 0) {
974 		dev_warn(&pdev->dev, "DMA not available, using PIO\n");
975 		ssi->playback.transfer = rz_ssi_pio_send;
976 		ssi->capture.transfer = rz_ssi_pio_recv;
977 	} else {
978 		dev_info(&pdev->dev, "DMA enabled\n");
979 		ssi->playback.transfer = rz_ssi_dma_transfer;
980 		ssi->capture.transfer = rz_ssi_dma_transfer;
981 	}
982 
983 	ssi->playback.priv = ssi;
984 	ssi->capture.priv = ssi;
985 
986 	spin_lock_init(&ssi->lock);
987 	dev_set_drvdata(&pdev->dev, ssi);
988 
989 	/* Error Interrupt */
990 	ssi->irq_int = platform_get_irq_byname(pdev, "int_req");
991 	if (ssi->irq_int < 0) {
992 		rz_ssi_release_dma_channels(ssi);
993 		return ssi->irq_int;
994 	}
995 
996 	ret = devm_request_irq(&pdev->dev, ssi->irq_int, &rz_ssi_interrupt,
997 			       0, dev_name(&pdev->dev), ssi);
998 	if (ret < 0) {
999 		rz_ssi_release_dma_channels(ssi);
1000 		return dev_err_probe(&pdev->dev, ret,
1001 				     "irq request error (int_req)\n");
1002 	}
1003 
1004 	if (!rz_ssi_is_dma_enabled(ssi)) {
1005 		/* Tx and Rx interrupts (pio only) */
1006 		ssi->irq_tx = platform_get_irq_byname(pdev, "dma_tx");
1007 		ssi->irq_rx = platform_get_irq_byname(pdev, "dma_rx");
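		/* With no separate tx/rx interrupts described, fall back to the combined "dma_rt" interrupt */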
1008 		if (ssi->irq_tx == -ENXIO && ssi->irq_rx == -ENXIO) {
1009 			ssi->irq_rt = platform_get_irq_byname(pdev, "dma_rt");
1010 			if (ssi->irq_rt < 0)
1011 				return ssi->irq_rt;
1012 
1013 			ret = devm_request_irq(&pdev->dev, ssi->irq_rt,
1014 					       &rz_ssi_interrupt, 0,
1015 					       dev_name(&pdev->dev), ssi);
1016 			if (ret < 0)
1017 				return dev_err_probe(&pdev->dev, ret,
1018 						     "irq request error (dma_rt)\n");
1019 		} else {
1020 			if (ssi->irq_tx < 0)
1021 				return ssi->irq_tx;
1022 
1023 			if (ssi->irq_rx < 0)
1024 				return ssi->irq_rx;
1025 
1026 			ret = devm_request_irq(&pdev->dev, ssi->irq_tx,
1027 					       &rz_ssi_interrupt, 0,
1028 					       dev_name(&pdev->dev), ssi);
1029 			if (ret < 0)
1030 				return dev_err_probe(&pdev->dev, ret,
1031 						"irq request error (dma_tx)\n");
1032 
1033 			ret = devm_request_irq(&pdev->dev, ssi->irq_rx,
1034 					       &rz_ssi_interrupt, 0,
1035 					       dev_name(&pdev->dev), ssi);
1036 			if (ret < 0)
1037 				return dev_err_probe(&pdev->dev, ret,
1038 						"irq request error (dma_rx)\n");
1039 		}
1040 	}
1041 
1042 	ssi->rstc = devm_reset_control_get_exclusive(&pdev->dev, NULL);
1043 	if (IS_ERR(ssi->rstc)) {
1044 		ret = PTR_ERR(ssi->rstc);
1045 		goto err_reset;
1046 	}
1047 
1048 	reset_control_deassert(ssi->rstc);
1049 	pm_runtime_enable(&pdev->dev);
1050 	ret = pm_runtime_resume_and_get(&pdev->dev);
1051 	if (ret < 0) {
1052 		dev_err(&pdev->dev, "pm_runtime_resume_and_get failed\n");
1053 		goto err_pm;
1054 	}
1055 
1056 	ret = devm_snd_soc_register_component(&pdev->dev, &rz_ssi_soc_component,
1057 					      rz_ssi_soc_dai,
1058 					      ARRAY_SIZE(rz_ssi_soc_dai));
1059 	if (ret < 0) {
1060 		dev_err(&pdev->dev, "failed to register snd component\n");
1061 		goto err_snd_soc;
1062 	}
1063 
1064 	return 0;
1065 
1066 err_snd_soc:
1067 	pm_runtime_put(ssi->dev);
1068 err_pm:
1069 	pm_runtime_disable(ssi->dev);
1070 	reset_control_assert(ssi->rstc);
1071 err_reset:
1072 	rz_ssi_release_dma_channels(ssi);
1073 
1074 	return ret;
1075 }
1076 
1077 static void rz_ssi_remove(struct platform_device *pdev)
1078 {
1079 	struct rz_ssi_priv *ssi = dev_get_drvdata(&pdev->dev);
1080 
1081 	rz_ssi_release_dma_channels(ssi);
1082 
1083 	pm_runtime_put(ssi->dev);
1084 	pm_runtime_disable(ssi->dev);
1085 	reset_control_assert(ssi->rstc);
1086 }
1087 
1088 static const struct of_device_id rz_ssi_of_match[] = {
1089 	{ .compatible = "renesas,rz-ssi", },
1090 	{/* Sentinel */},
1091 };
1092 MODULE_DEVICE_TABLE(of, rz_ssi_of_match);
1093 
1094 static struct platform_driver rz_ssi_driver = {
1095 	.driver	= {
1096 		.name	= "rz-ssi-pcm-audio",
1097 		.of_match_table = rz_ssi_of_match,
1098 	},
1099 	.probe		= rz_ssi_probe,
1100 	.remove_new	= rz_ssi_remove,
1101 };
1102 
1103 module_platform_driver(rz_ssi_driver);
1104 
1105 MODULE_LICENSE("GPL v2");
1106 MODULE_DESCRIPTION("Renesas RZ/G2L ASoC Serial Sound Interface Driver");
1107 MODULE_AUTHOR("Biju Das <biju.das.jz@bp.renesas.com>");
1108