// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//          Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//          Rander Wang <rander.wang@intel.com>
//          Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include <trace/events/sof_intel.h>
#include "../ops.h"
#include "../sof-audio.h"
#include "../ipc4-priv.h"
#include "hda.h"

int sof_hda_position_quirk = SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS;
module_param_named(position_quirk, sof_hda_position_quirk, int, 0444);
MODULE_PARM_DESC(position_quirk, "SOF HDaudio position quirk");
EXPORT_SYMBOL_NS(sof_hda_position_quirk, SND_SOC_SOF_INTEL_HDA_COMMON);

#define HDA_LTRP_GB_VALUE_US	95
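/*
 * Note: HDA_LTRP_GB_VALUE_US is the LTR guardband, in microseconds, written to
 * HDA_VS_INTEL_LTRP by hda_dsp_iccmax_stream_hw_params() below, following the
 * hardware recommendation for firmware boot.
 */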

static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
{
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
		return "Playback";
	else
		return "Capture";
}

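/*
 * Build a human-readable description of the stream for error messages. The
 * string is allocated with kasprintf(), so the caller must kfree() it and be
 * prepared for a NULL return on allocation failure.
 */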
static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
{
	struct snd_soc_pcm_runtime *rtd;

	if (hstream->substream)
		rtd = snd_soc_substream_to_rtd(hstream->substream);
	else if (hstream->cstream)
		rtd = hstream->cstream->private_data;
	else
		/* Non audio DMA user, like dma-trace */
		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
				 hda_hstream_direction_str(hstream),
				 hstream->stream_tag);

	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
			 hstream->stream_tag);
}

/*
 * set up one of BDL entries for a stream
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *hstream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		hstream->frags++;
		offset += chunk;
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *hstream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = hstream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
	if (!period_bytes)
		period_bytes = hstream->bufsize;

	periods = hstream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods:%d\n", periods);

	remain = hstream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
	offset = 0;
	hstream->frags = 0;

	/*
	 * set IOC if the position IPC is not used and period wakeups are
	 * needed.
	 */
	ioc = hda->no_ipc_position ?
	      !hstream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}
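
/*
 * Illustrative example of the BDL programming above (not tied to any specific
 * topology): a 96 KiB physically contiguous buffer with 32 KiB periods gives
 * periods = 3 and remain = 0, so hda_setup_bdle() is called three times and,
 * when IOC is requested, the interrupt-on-completion bit is set only on the
 * last BDL entry of each period. Scatter-gather chunking and the optional
 * 4 KiB alignment can split each period into additional entries.
 */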

int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *hext_stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB value */
	sof_io_write(sdev, hstream->spib_addr, size);

	return 0;
}

/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			hext_stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(hext_stream,
						  struct sof_intel_hda_stream,
						  hext_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found? */
	if (!hext_stream) {
		dev_err(sdev->dev, "error: no free %s streams\n", snd_pcm_direction_name(direction));
		return hext_stream;
	}

	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios. This is not needed for the ACE IP.
	 */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 &&
	    !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);
		hda->l1_disabled = true;
	}

	return hext_stream;
}

/* free a stream */
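/*
 * Closing a stream also re-evaluates, under bus->reg_lock, whether any
 * remaining open stream is DMI L1 incompatible; if none is, DMI L1 entry is
 * re-enabled on pre-ACE platforms.
 */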
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *s;
	bool dmi_l1_enable = true;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag and check if there are any open streams
	 * that are DMI L1 incompatible.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);

		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
			dmi_l1_enable = false;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 if permitted */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);
		hda->l1_disabled = false;
	}

	if (!found) {
		dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
			__func__, stream_tag);
		return -ENODEV;
	}

	return 0;
}

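/*
 * Reset the stream by setting and then clearing the CRST bit, waiting for the
 * hardware to acknowledge each transition as the HDA stream reset sequence
 * requires.
 */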
static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
{
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 val;

	/* enter stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
				SOF_STREAM_SD_OFFSET_CRST);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if (val & SOF_STREAM_SD_OFFSET_CRST)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;

	/* exit stream reset and wait to read a zero before reading any other register */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
		return -ETIMEDOUT;
	}

	return 0;
}

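/*
 * Start/stop the host DMA RUN bit for a stream. Note that pause push/release
 * only reaches the RUN bit in DSP-less mode; when the DSP is in use those
 * triggers are expected to be handled at a higher level and are a no-op here.
 */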
int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *hext_stream, int cmd)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret = 0;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_START:
		if (hstream->running)
			break;

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
						    HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    ((run & dma_start) == dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0)
			hstream->running = true;

		break;
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    !(run & dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0) {
			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
					  sd_offset + SOF_HDA_ADSP_REG_SD_STS,
					  SOF_HDA_CL_DMA_SD_INT_MASK);

			hstream->running = false;
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						SOF_HDA_INTCTL,
						1 << hstream->index, 0x0);
		}
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
			__func__, cmd, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
	}

	return ret;
}

/* minimal recommended programming for ICCMAX stream */
int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
				    struct snd_dma_buffer *dmab,
				    struct snd_pcm_hw_params *params)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int ret;
	u32 mask = 0x1 << hstream->index;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  0x0);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
			  hstream->bufsize);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
	snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP,
			    HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);

	/* start DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);

	return 0;
}

/*
 * prepare for common hdac registers settings, for both code loader
 * and normal stream.
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *hext_stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset, ret;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 mask;
	u32 run;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	hstream = &hext_stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	mask = BIT(hstream->index);

	/* decouple host and link DMA if the DSP is used */
	if (!sdev->dspless_mode_selected)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, mask);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* stream reset */
	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 * on earlier platforms - this is not needed on newer platforms
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 *    for corresponding stream index before the time of writing
	 *    format to SDxFMT register.
	 * 2. Write SDxFMT
	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
	 *    enable decoupled mode
	 */

	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
		/* couple host and link DMA, disable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, 0);

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_SD_FORMAT,
				0xffff, hstream->format_val);

	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
		/* decouple host and link DMA, enable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, mask);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer, if needed */
	if (bus->use_posbuf && bus->posbuf.addr &&
	    !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_SD_FIFOSIZE);
		hstream->fifo_size &= SOF_HDA_SD_FIFOSIZE_FIFOS_MASK;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}

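/*
 * Reset the stream and, when the DSP is in use, re-couple host and link DMA
 * (provided the link side is idle), then disable SPIB and detach the
 * substream.
 */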
int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = container_of(hstream,
							    struct hdac_ext_stream,
							    hstream);
	int ret;

	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (!sdev->dspless_mode_selected) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		u32 mask = BIT(hstream->index);

		spin_lock_irq(&bus->reg_lock);
		/* couple host and link DMA if link DMA channel is idle */
		if (!hext_stream->link_locked)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
						SOF_HDA_REG_PP_PPCTL, mask, 0);
		spin_unlock_irq(&bus->reg_lock);
	}

	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);

	hstream->substream = NULL;

	return 0;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_hw_free, SND_SOC_SOF_INTEL_HDA_COMMON);

bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* The function can be called from irq thread context, so use spin_lock_irq */
	spin_lock_irq(&bus->reg_lock);

	status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

	trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);

	/* if the register is inaccessible, ignore it */
	if (status != 0xffffffff)
		ret = true;

	spin_unlock_irq(&bus->reg_lock);

	return ret;
}
EXPORT_SYMBOL_NS(hda_dsp_check_stream_irq, SND_SOC_SOF_INTEL_HDA_COMMON);

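/*
 * Advance hstream->curr_pos for a compressed stream by the number of bytes
 * moved since the last update, handling wrap-around of the DMA position
 * within the ring buffer.
 */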
static void
hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
{
	u64 buffer_size = hstream->bufsize;
	u64 prev_pos, pos, num_bytes;

	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
	pos = hda_dsp_stream_get_position(hstream, direction, false);

	if (pos < prev_pos)
		num_bytes = (buffer_size - prev_pos) + pos;
	else
		num_bytes = pos - prev_pos;

	hstream->curr_pos += num_bytes;
}

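/*
 * Check and acknowledge the per-stream status bits reported in INTSTS.
 * Returns true if at least one opened stream had status to clear; the caller
 * uses this to decide whether another pass over the streams is needed.
 */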
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);

			writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			active = true;
			if (!s->running)
				continue;
			if ((sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;
			if (!s->substream && !s->cstream) {
				/*
				 * when no substream is found, the DMA may be used for
				 * code loading or data transfers which can rely on
				 * wait_for_completion()
				 */
				struct sof_intel_hda_stream *hda_stream;
				struct hdac_ext_stream *hext_stream;

				hext_stream = stream_to_hdac_ext_stream(s);
				hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
							  hext_stream);

				complete(&hda_stream->ioc);
				continue;
			}

			/* Inform ALSA only if the IPC position is not used */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}

irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
		if (status & AZX_INT_CTRL_EN)
			active |= hda_codec_check_rirb_status(sdev);

		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_threaded_handler, SND_SOC_SOF_INTEL_HDA_COMMON);

int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

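	/*
	 * GCAP bits 11:8 give the number of input (capture) streams and bits
	 * 15:12 the number of output (playback) streams. As an illustrative
	 * example, gcap = 0x9701 decodes to 7 capture and 9 playback streams.
	 */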
	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

	/*
	 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for
	 * HDAudio codecs
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}

	/* create capture and playback streams */
	for (i = 0; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;
		init_completion(&hda_stream->ioc);

		hext_stream = &hda_stream->hext_stream;

		if (sdev->bar[HDA_DSP_PP_BAR]) {
			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
				SOF_HDA_PPLC_INTERVAL * i;
		}

		hstream = &hext_stream->hstream;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->opened = false;
		hstream->running = false;

		if (i < num_capture) {
			hstream->stream_tag = i + 1;
			hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
		} else {
			hstream->stream_tag = i - num_capture + 1;
			hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
		}

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
					     (hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	/* store stream count from GCAP required for CHAIN_DMA */
	if (sdev->pdata->ipc_type == SOF_IPC_TYPE_4) {
		struct sof_ipc4_fw_data *ipc4_data = sdev->private;

		ipc4_data->num_playback_streams = num_playback;
		ipc4_data->num_capture_streams = num_capture;
	}

	return 0;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_init, SND_SOC_SOF_INTEL_HDA_COMMON);

void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *hext_stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

	/* free CORB/RIRB buffer - only used for HDaudio codecs */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
					  hext_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}
EXPORT_SYMBOL_NS(hda_dsp_stream_free, SND_SOC_SOF_INTEL_HDA_COMMON);

snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
					      int direction, bool can_sleep)
{
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
	struct snd_sof_dev *sdev = hda_stream->sdev;
	snd_pcm_uframes_t pos;

	switch (sof_hda_position_quirk) {
	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
		/*
		 * This legacy code, inherited from the Skylake driver,
		 * mixes DPIB registers and DPIB DDR updates and
		 * does not seem to follow any known hardware recommendations.
		 * It's not clear e.g. why there is a different flow
		 * for capture and playback; the only information that matters is
		 * what traffic class is used, and on all SOF-enabled platforms
		 * only VC0 is supported, so the work-around was likely not necessary
		 * and quite possibly wrong.
		 */

		/* DPIB/posbuf position mode:
		 * For Playback, use the DPIB register from HDA space, which
		 * reflects the actual data transferred.
		 * For Capture, use the position buffer for the pointer, as DPIB
		 * is not accurate enough; its update may be completed
		 * earlier than the data written to DDR.
		 */
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					       AZX_REG_VS_SDXDPIB_XBASE +
					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
						hstream->index));
		} else {
			/*
			 * For a capture stream, more workarounds are needed to fix the
			 * incorrect position issue:
			 *
			 * 1. Wait at least 20us before reading the position buffer after
			 *    the interrupt is generated (IOC), to make sure the position
			 *    update happens on a frame boundary, i.e. 20.833us for 48KHz.
			 * 2. Perform a dummy read of the DPIB register to flush the DMA
			 *    position value.
			 * 3. Read the DMA position from posbuf. Now the readback
			 *    value should be >= the period boundary.
			 */
			if (can_sleep)
				usleep_range(20, 21);

			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 AZX_REG_VS_SDXDPIB_XBASE +
					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
					  hstream->index));
			pos = snd_hdac_stream_get_pos_posbuf(hstream);
		}
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * In case VC1 traffic is disabled this is the recommended option
		 */
		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       AZX_REG_VS_SDXDPIB_XBASE +
				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
					hstream->index));
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
		/*
		 * This is the recommended option when VC1 is enabled.
		 * While this isn't needed for SOF platforms it's added for
		 * consistency and debug.
		 */
		pos = snd_hdac_stream_get_pos_posbuf(hstream);
		break;
	default:
		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
			     sof_hda_position_quirk);
		pos = 0;
		break;
	}

	if (pos >= hstream->bufsize)
		pos = 0;

	return pos;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_get_position, SND_SOC_SOF_INTEL_HDA_COMMON);

#define merge_u64(u32_u, u32_l)	(((u64)(u32_u) << 32) | (u32_l))

/**
 * hda_dsp_get_stream_llp - Retrieve the LLP (Linear Link Position) of the stream
 * @sdev: SOF device
 * @component: ASoC component
 * @substream: PCM substream
 *
 * Returns the raw Linear Link Position value
 */
u64 hda_dsp_get_stream_llp(struct snd_sof_dev *sdev,
			   struct snd_soc_component *component,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	u32 llp_l, llp_u;

	/*
	 * The pplc_addr has been calculated during probe in
	 * hda_dsp_stream_init():
	 * pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
	 *	       SOF_HDA_PPLC_BASE +
	 *	       SOF_HDA_PPLC_MULTI * total_stream +
	 *	       SOF_HDA_PPLC_INTERVAL * stream_index
	 *
	 * Use this pre-calculated address to avoid repeated re-calculation.
	 */
	llp_l = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPL);
	llp_u = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPU);

	/* Compensate the LLP counter with the saved offset */
	if (hext_stream->pplcllpl || hext_stream->pplcllpu)
		return merge_u64(llp_u, llp_l) -
		       merge_u64(hext_stream->pplcllpu, hext_stream->pplcllpl);

	return merge_u64(llp_u, llp_l);
}
EXPORT_SYMBOL_NS(hda_dsp_get_stream_llp, SND_SOC_SOF_INTEL_HDA_COMMON);

/**
 * hda_dsp_get_stream_ldp - Retrieve the LDP (Linear DMA Position) of the stream
 * @sdev: SOF device
 * @component: ASoC component
 * @substream: PCM substream
 *
 * Returns the raw Linear DMA Position value
 */
u64 hda_dsp_get_stream_ldp(struct snd_sof_dev *sdev,
			   struct snd_soc_component *component,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	u32 ldp_l, ldp_u;

	/*
	 * The pphc_addr has been calculated during probe in
	 * hda_dsp_stream_init():
	 * pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
	 *	       SOF_HDA_PPHC_BASE +
	 *	       SOF_HDA_PPHC_INTERVAL * stream_index
	 *
	 * Use this pre-calculated address to avoid repeated re-calculation.
	 */
	ldp_l = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPL);
	ldp_u = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPU);

	return ((u64)ldp_u << 32) | ldp_l;
}
EXPORT_SYMBOL_NS(hda_dsp_get_stream_ldp, SND_SOC_SOF_INTEL_HDA_COMMON);