1 // SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2010-2011,2013-2015 The Linux Foundation. All rights reserved.
 *
 * lpass-cpu.c -- ALSA SoC CPU DAI driver for QTi LPASS
 */
8 #include <dt-bindings/sound/qcom,lpass.h>
10 #include <linux/kernel.h>
11 #include <linux/module.h>
13 #include <linux/platform_device.h>
14 #include <sound/pcm.h>
15 #include <sound/pcm_params.h>
16 #include <linux/regmap.h>
17 #include <sound/soc.h>
18 #include <sound/soc-dai.h>
19 #include "lpass-lpaif-reg.h"
22 #define LPASS_CPU_MAX_MI2S_LINES 4
23 #define LPASS_CPU_I2S_SD0_MASK BIT(0)
24 #define LPASS_CPU_I2S_SD1_MASK BIT(1)
25 #define LPASS_CPU_I2S_SD2_MASK BIT(2)
26 #define LPASS_CPU_I2S_SD3_MASK BIT(3)
27 #define LPASS_CPU_I2S_SD0_1_MASK GENMASK(1, 0)
28 #define LPASS_CPU_I2S_SD2_3_MASK GENMASK(3, 2)
29 #define LPASS_CPU_I2S_SD0_1_2_MASK GENMASK(2, 0)
30 #define LPASS_CPU_I2S_SD0_1_2_3_MASK GENMASK(3, 0)
31 #define LPASS_REG_READ 1
32 #define LPASS_REG_WRITE 0
35 * Channel maps for Quad channel playbacks on MI2S Secondary
/* Channel map exposed to userspace for 4-channel playback: FL/RL/FR/RR. */
static struct snd_pcm_chmap_elem lpass_quad_chmaps[] = {
		.map = { SNDRV_CHMAP_FL, SNDRV_CHMAP_RL,
				SNDRV_CHMAP_FR, SNDRV_CHMAP_RR } },
43 static int lpass_cpu_init_i2sctl_bitfields(struct device *dev,
44 struct lpaif_i2sctl *i2sctl, struct regmap *map)
46 struct lpass_data *drvdata = dev_get_drvdata(dev);
47 const struct lpass_variant *v = drvdata->variant;
49 i2sctl->loopback = devm_regmap_field_alloc(dev, map, v->loopback);
50 i2sctl->spken = devm_regmap_field_alloc(dev, map, v->spken);
51 i2sctl->spkmode = devm_regmap_field_alloc(dev, map, v->spkmode);
52 i2sctl->spkmono = devm_regmap_field_alloc(dev, map, v->spkmono);
53 i2sctl->micen = devm_regmap_field_alloc(dev, map, v->micen);
54 i2sctl->micmode = devm_regmap_field_alloc(dev, map, v->micmode);
55 i2sctl->micmono = devm_regmap_field_alloc(dev, map, v->micmono);
56 i2sctl->wssrc = devm_regmap_field_alloc(dev, map, v->wssrc);
57 i2sctl->bitwidth = devm_regmap_field_alloc(dev, map, v->bitwidth);
59 if (IS_ERR(i2sctl->loopback) || IS_ERR(i2sctl->spken) ||
60 IS_ERR(i2sctl->spkmode) || IS_ERR(i2sctl->spkmono) ||
61 IS_ERR(i2sctl->micen) || IS_ERR(i2sctl->micmode) ||
62 IS_ERR(i2sctl->micmono) || IS_ERR(i2sctl->wssrc) ||
63 IS_ERR(i2sctl->bitwidth))
69 static int lpass_cpu_daiops_set_sysclk(struct snd_soc_dai *dai, int clk_id,
70 unsigned int freq, int dir)
72 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
75 ret = clk_set_rate(drvdata->mi2s_osr_clk[dai->driver->id], freq);
77 dev_err(dai->dev, "error setting mi2s osrclk to %u: %d\n",
83 static int lpass_cpu_daiops_startup(struct snd_pcm_substream *substream,
84 struct snd_soc_dai *dai)
86 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
89 ret = clk_prepare_enable(drvdata->mi2s_osr_clk[dai->driver->id]);
91 dev_err(dai->dev, "error in enabling mi2s osr clk: %d\n", ret);
94 ret = clk_prepare(drvdata->mi2s_bit_clk[dai->driver->id]);
96 dev_err(dai->dev, "error in enabling mi2s bit clk: %d\n", ret);
97 clk_disable_unprepare(drvdata->mi2s_osr_clk[dai->driver->id]);
/*
 * DAI .shutdown callback: disable the I2S enable bit for the active
 * direction, then unwind the clocks taken in startup()/prepare().
 * NOTE(review): this view of the source is truncated (braces and some
 * statements are missing); comments only, code left untouched.
 */
static void lpass_cpu_daiops_shutdown(struct snd_pcm_substream *substream,
		struct snd_soc_dai *dai)
	struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
	struct lpaif_i2sctl *i2sctl = drvdata->i2sctl;
	unsigned int id = dai->driver->id;
	/* pairs with clk_prepare_enable() in lpass_cpu_daiops_startup() */
	clk_disable_unprepare(drvdata->mi2s_osr_clk[dai->driver->id]);
	/*
	 * Ensure LRCLK is disabled even in device node validation.
	 * Will not impact if disabled in lpass_cpu_daiops_trigger()
	 * suspend.
	 */
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
		regmap_fields_write(i2sctl->spken, id, LPAIF_I2SCTL_SPKEN_DISABLE);
		regmap_fields_write(i2sctl->micen, id, LPAIF_I2SCTL_MICEN_DISABLE);
	/*
	 * BCLK may not be enabled if lpass_cpu_daiops_prepare is called before
	 * lpass_cpu_daiops_shutdown. It's paired with the clk_enable in
	 * lpass_cpu_daiops_prepare.
	 */
	if (drvdata->mi2s_was_prepared[dai->driver->id]) {
		drvdata->mi2s_was_prepared[dai->driver->id] = false;
		clk_disable(drvdata->mi2s_bit_clk[dai->driver->id]);
	/* pairs with clk_prepare() in startup() */
	clk_unprepare(drvdata->mi2s_bit_clk[dai->driver->id]);
/*
 * DAI .hw_params callback: program the per-port I2SCTL register
 * (loopback off, internal WS source, bit width, SD-line mode, mono/stereo)
 * and set the bit clock to rate * bitwidth * 2 (two LRCLK phases).
 * NOTE(review): this view of the source is truncated (switch framing,
 * breaks and returns are missing); comments only, code left untouched.
 */
static int lpass_cpu_daiops_hw_params(struct snd_pcm_substream *substream,
		struct snd_pcm_hw_params *params, struct snd_soc_dai *dai)
	struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
	struct lpaif_i2sctl *i2sctl = drvdata->i2sctl;
	unsigned int id = dai->driver->id;
	snd_pcm_format_t format = params_format(params);
	unsigned int channels = params_channels(params);
	unsigned int rate = params_rate(params);
	bitwidth = snd_pcm_format_width(format);
		dev_err(dai->dev, "invalid bit width given: %d\n", bitwidth);
	/* loopback is always disabled in normal operation */
	ret = regmap_fields_write(i2sctl->loopback, id,
			LPAIF_I2SCTL_LOOPBACK_DISABLE);
		dev_err(dai->dev, "error updating loopback field: %d\n", ret);
	/* LPASS generates the word-select clock internally */
	ret = regmap_fields_write(i2sctl->wssrc, id,
			LPAIF_I2SCTL_WSSRC_INTERNAL);
		dev_err(dai->dev, "error updating wssrc field: %d\n", ret);
	/* map the PCM sample width onto the register encoding */
		regval = LPAIF_I2SCTL_BITWIDTH_16;
		regval = LPAIF_I2SCTL_BITWIDTH_24;
		regval = LPAIF_I2SCTL_BITWIDTH_32;
		dev_err(dai->dev, "invalid bitwidth given: %d\n", bitwidth);
	ret = regmap_fields_write(i2sctl->bitwidth, id, regval);
		dev_err(dai->dev, "error updating bitwidth field: %d\n", ret);
	/* pick the SD-line mode parsed from DT for this direction */
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
		mode = drvdata->mi2s_playback_sd_mode[id];
		mode = drvdata->mi2s_capture_sd_mode[id];
		dev_err(dai->dev, "no line is assigned\n");
	/*
	 * Narrow a wider SD-line mode down to the minimum needed for the
	 * requested channel count; reject modes with too few lines.
	 */
	case LPAIF_I2SCTL_MODE_QUAD01:
	case LPAIF_I2SCTL_MODE_6CH:
	case LPAIF_I2SCTL_MODE_8CH:
		mode = LPAIF_I2SCTL_MODE_SD0;
	case LPAIF_I2SCTL_MODE_QUAD23:
		mode = LPAIF_I2SCTL_MODE_SD2;
	if (mode < LPAIF_I2SCTL_MODE_QUAD01) {
		dev_err(dai->dev, "cannot configure 4 channels with mode %d\n",
	case LPAIF_I2SCTL_MODE_6CH:
	case LPAIF_I2SCTL_MODE_8CH:
		mode = LPAIF_I2SCTL_MODE_QUAD01;
	if (mode < LPAIF_I2SCTL_MODE_6CH) {
		dev_err(dai->dev, "cannot configure 6 channels with mode %d\n",
	case LPAIF_I2SCTL_MODE_8CH:
		mode = LPAIF_I2SCTL_MODE_6CH;
	if (mode < LPAIF_I2SCTL_MODE_8CH) {
		dev_err(dai->dev, "cannot configure 8 channels with mode %d\n",
		dev_err(dai->dev, "invalid channels given: %u\n", channels);
	/* program speaker/mic mode and mono-vs-stereo for the direction */
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		ret = regmap_fields_write(i2sctl->spkmode, id,
					LPAIF_I2SCTL_SPKMODE(mode));
			dev_err(dai->dev, "error writing to i2sctl spkr mode: %d\n",
			ret = regmap_fields_write(i2sctl->spkmono, id,
						LPAIF_I2SCTL_SPKMONO_STEREO);
			ret = regmap_fields_write(i2sctl->spkmono, id,
						LPAIF_I2SCTL_SPKMONO_MONO);
		ret = regmap_fields_write(i2sctl->micmode, id,
					LPAIF_I2SCTL_MICMODE(mode));
			dev_err(dai->dev, "error writing to i2sctl mic mode: %d\n",
			ret = regmap_fields_write(i2sctl->micmono, id,
						LPAIF_I2SCTL_MICMONO_STEREO);
			ret = regmap_fields_write(i2sctl->micmono, id,
						LPAIF_I2SCTL_MICMONO_MONO);
		dev_err(dai->dev, "error writing to i2sctl channels mode: %d\n",
	/* BCLK = sample rate * bit width * 2 channels per frame */
	ret = clk_set_rate(drvdata->mi2s_bit_clk[id],
			   rate * bitwidth * 2);
		dev_err(dai->dev, "error setting mi2s bitclk to %u: %d\n",
			rate * bitwidth * 2, ret);
/*
 * DAI .trigger callback: toggle the I2S enable bit and the bit clock for
 * start/resume vs stop/suspend events. Runs in atomic context, so only
 * clk_enable()/clk_disable() (no prepare) are used here.
 * NOTE(review): this view of the source is truncated (case framing and
 * returns are missing); comments only, code left untouched.
 */
static int lpass_cpu_daiops_trigger(struct snd_pcm_substream *substream,
		int cmd, struct snd_soc_dai *dai)
	struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
	struct lpaif_i2sctl *i2sctl = drvdata->i2sctl;
	unsigned int id = dai->driver->id;
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		/*
		 * Ensure lpass BCLK/LRCLK is enabled during
		 * device resume as lpass_cpu_daiops_prepare() is not called
		 * after the device resumes. We don't check mi2s_was_prepared before
		 * enable/disable BCLK in trigger events because:
		 * 1. These trigger events are paired, so the BCLK
		 *    enable_count is balanced.
		 * 2. the BCLK can be shared (ex: headset and headset mic),
		 *    we need to increase the enable_count so that we don't
		 *    turn off the shared BCLK while other devices are using
		 *    it.
		 */
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			ret = regmap_fields_write(i2sctl->spken, id,
						 LPAIF_I2SCTL_SPKEN_ENABLE);
			ret = regmap_fields_write(i2sctl->micen, id,
						 LPAIF_I2SCTL_MICEN_ENABLE);
			dev_err(dai->dev, "error writing to i2sctl reg: %d\n",
		ret = clk_enable(drvdata->mi2s_bit_clk[id]);
			dev_err(dai->dev, "error in enabling mi2s bit clk: %d\n", ret);
			/* unwind OSR clock on BCLK enable failure */
			clk_disable(drvdata->mi2s_osr_clk[id]);
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		/*
		 * To ensure lpass BCLK/LRCLK is disabled during
		 * device suspend.
		 */
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			ret = regmap_fields_write(i2sctl->spken, id,
						 LPAIF_I2SCTL_SPKEN_DISABLE);
			ret = regmap_fields_write(i2sctl->micen, id,
						 LPAIF_I2SCTL_MICEN_DISABLE);
			dev_err(dai->dev, "error writing to i2sctl reg: %d\n",
		/* paired with the clk_enable() in the start cases above */
		clk_disable(drvdata->mi2s_bit_clk[dai->driver->id]);
/*
 * DAI .prepare callback: enable the I2S port and (once per stream) the bit
 * clock before data flow starts, so codecs see BCLK/LRCLK early.
 * NOTE(review): this view of the source is truncated (some braces/returns
 * are missing); comments only, code left untouched.
 */
static int lpass_cpu_daiops_prepare(struct snd_pcm_substream *substream,
		struct snd_soc_dai *dai)
	struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
	struct lpaif_i2sctl *i2sctl = drvdata->i2sctl;
	unsigned int id = dai->driver->id;
	/*
	 * Ensure lpass BCLK/LRCLK is enabled bit before playback/capture
	 * data flow starts. This allows other codec to have some delay before
	 * the data flow starts
	 * (ex: to drop start up pop noise before capture starts).
	 */
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
		ret = regmap_fields_write(i2sctl->spken, id, LPAIF_I2SCTL_SPKEN_ENABLE);
		ret = regmap_fields_write(i2sctl->micen, id, LPAIF_I2SCTL_MICEN_ENABLE);
		dev_err(dai->dev, "error writing to i2sctl reg: %d\n", ret);
	/*
	 * Check mi2s_was_prepared before enabling BCLK as lpass_cpu_daiops_prepare can
	 * be called multiple times. It's paired with the clk_disable in
	 * lpass_cpu_daiops_shutdown.
	 */
	if (!drvdata->mi2s_was_prepared[dai->driver->id]) {
		ret = clk_enable(drvdata->mi2s_bit_clk[id]);
			dev_err(dai->dev, "error in enabling mi2s bit clk: %d\n", ret);
		drvdata->mi2s_was_prepared[dai->driver->id] = true;
/*
 * DAI .pcm_new callback: register the quad channel-map control for DAIs
 * whose playback SD-line mode is QUAD01 (SD lines 0+1).
 * NOTE(review): this view of the source is truncated (return paths are
 * missing); comments only, code left untouched.
 */
static int lpass_cpu_daiops_pcm_new(struct snd_soc_pcm_runtime *rtd, struct snd_soc_dai *dai)
	struct snd_soc_dai_driver *drv = dai->driver;
	struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
	if (drvdata->mi2s_playback_sd_mode[dai->id] == LPAIF_I2SCTL_MODE_QUAD01) {
		ret = snd_pcm_add_chmap_ctls(rtd->pcm, SNDRV_PCM_STREAM_PLAYBACK,
				lpass_quad_chmaps, drv->playback.channels_max, 0,
424 static int lpass_cpu_daiops_probe(struct snd_soc_dai *dai)
426 struct lpass_data *drvdata = snd_soc_dai_get_drvdata(dai);
429 /* ensure audio hardware is disabled */
430 ret = regmap_write(drvdata->lpaif_map,
431 LPAIF_I2SCTL_REG(drvdata->variant, dai->driver->id), 0);
433 dev_err(dai->dev, "error writing to i2sctl reg: %d\n", ret);
/* DAI ops for MI2S ports (no chmap control registration). */
const struct snd_soc_dai_ops asoc_qcom_lpass_cpu_dai_ops = {
	.probe	= lpass_cpu_daiops_probe,
	.set_sysclk	= lpass_cpu_daiops_set_sysclk,
	.startup	= lpass_cpu_daiops_startup,
	.shutdown	= lpass_cpu_daiops_shutdown,
	.hw_params	= lpass_cpu_daiops_hw_params,
	.trigger	= lpass_cpu_daiops_trigger,
	.prepare	= lpass_cpu_daiops_prepare,
EXPORT_SYMBOL_GPL(asoc_qcom_lpass_cpu_dai_ops);
/* Same ops as above plus .pcm_new, for variants needing chmap controls. */
const struct snd_soc_dai_ops asoc_qcom_lpass_cpu_dai_ops2 = {
	.pcm_new	= lpass_cpu_daiops_pcm_new,
	.probe	= lpass_cpu_daiops_probe,
	.set_sysclk	= lpass_cpu_daiops_set_sysclk,
	.startup	= lpass_cpu_daiops_startup,
	.shutdown	= lpass_cpu_daiops_shutdown,
	.hw_params	= lpass_cpu_daiops_hw_params,
	.trigger	= lpass_cpu_daiops_trigger,
	.prepare	= lpass_cpu_daiops_prepare,
EXPORT_SYMBOL_GPL(asoc_qcom_lpass_cpu_dai_ops2);
461 static int asoc_qcom_of_xlate_dai_name(struct snd_soc_component *component,
462 const struct of_phandle_args *args,
463 const char **dai_name)
465 struct lpass_data *drvdata = snd_soc_component_get_drvdata(component);
466 const struct lpass_variant *variant = drvdata->variant;
467 int id = args->args[0];
471 for (i = 0; i < variant->num_dai; i++) {
472 if (variant->dai_driver[i].id == id) {
473 *dai_name = variant->dai_driver[i].name;
/* CPU component: only provides DT DAI-name translation. */
static const struct snd_soc_component_driver lpass_cpu_comp_driver = {
	.of_xlate_dai_name = asoc_qcom_of_xlate_dai_name,
	.legacy_dai_naming = 1,
/*
 * regmap .writeable_reg: true for I2SCTL, IRQ enable/clear, and RDMA/WRDMA
 * control registers of every port/channel the variant declares.
 * NOTE(review): this view of the source is truncated (return true/false
 * lines are missing); comments only, code left untouched.
 */
static bool lpass_cpu_regmap_writeable(struct device *dev, unsigned int reg)
	struct lpass_data *drvdata = dev_get_drvdata(dev);
	const struct lpass_variant *v = drvdata->variant;
	for (i = 0; i < v->i2s_ports; ++i)
		if (reg == LPAIF_I2SCTL_REG(v, i))
	for (i = 0; i < v->irq_ports; ++i) {
		if (reg == LPAIF_IRQEN_REG(v, i))
		if (reg == LPAIF_IRQCLEAR_REG(v, i))
	for (i = 0; i < v->rdma_channels; ++i) {
		if (reg == LPAIF_RDMACTL_REG(v, i))
		if (reg == LPAIF_RDMABASE_REG(v, i))
		if (reg == LPAIF_RDMABUFF_REG(v, i))
		if (reg == LPAIF_RDMAPER_REG(v, i))
	/* write-DMA channels are offset by wrdma_channel_start */
	for (i = 0; i < v->wrdma_channels; ++i) {
		if (reg == LPAIF_WRDMACTL_REG(v, i + v->wrdma_channel_start))
		if (reg == LPAIF_WRDMABASE_REG(v, i + v->wrdma_channel_start))
		if (reg == LPAIF_WRDMABUFF_REG(v, i + v->wrdma_channel_start))
		if (reg == LPAIF_WRDMAPER_REG(v, i + v->wrdma_channel_start))
/*
 * regmap .readable_reg: same set as writeable plus IRQSTAT and the
 * RDMA/WRDMA "current address" registers.
 * NOTE(review): this view of the source is truncated (return true/false
 * lines are missing); comments only, code left untouched.
 */
static bool lpass_cpu_regmap_readable(struct device *dev, unsigned int reg)
	struct lpass_data *drvdata = dev_get_drvdata(dev);
	const struct lpass_variant *v = drvdata->variant;
	for (i = 0; i < v->i2s_ports; ++i)
		if (reg == LPAIF_I2SCTL_REG(v, i))
	for (i = 0; i < v->irq_ports; ++i) {
		if (reg == LPAIF_IRQCLEAR_REG(v, i))
		if (reg == LPAIF_IRQEN_REG(v, i))
		if (reg == LPAIF_IRQSTAT_REG(v, i))
	for (i = 0; i < v->rdma_channels; ++i) {
		if (reg == LPAIF_RDMACTL_REG(v, i))
		if (reg == LPAIF_RDMABASE_REG(v, i))
		if (reg == LPAIF_RDMABUFF_REG(v, i))
		if (reg == LPAIF_RDMACURR_REG(v, i))
		if (reg == LPAIF_RDMAPER_REG(v, i))
	/* write-DMA channels are offset by wrdma_channel_start */
	for (i = 0; i < v->wrdma_channels; ++i) {
		if (reg == LPAIF_WRDMACTL_REG(v, i + v->wrdma_channel_start))
		if (reg == LPAIF_WRDMABASE_REG(v, i + v->wrdma_channel_start))
		if (reg == LPAIF_WRDMABUFF_REG(v, i + v->wrdma_channel_start))
		if (reg == LPAIF_WRDMACURR_REG(v, i + v->wrdma_channel_start))
		if (reg == LPAIF_WRDMAPER_REG(v, i + v->wrdma_channel_start))
/*
 * regmap .volatile_reg: IRQ clear/status and DMA "current address"
 * registers change under hardware control and must bypass the cache.
 * NOTE(review): this view of the source is truncated (return lines are
 * missing); comments only, code left untouched.
 */
static bool lpass_cpu_regmap_volatile(struct device *dev, unsigned int reg)
	struct lpass_data *drvdata = dev_get_drvdata(dev);
	const struct lpass_variant *v = drvdata->variant;
	for (i = 0; i < v->irq_ports; ++i) {
		if (reg == LPAIF_IRQCLEAR_REG(v, i))
		if (reg == LPAIF_IRQSTAT_REG(v, i))
	for (i = 0; i < v->rdma_channels; ++i)
		if (reg == LPAIF_RDMACURR_REG(v, i))
	for (i = 0; i < v->wrdma_channels; ++i)
		if (reg == LPAIF_WRDMACURR_REG(v, i + v->wrdma_channel_start))
/* Non-const: .max_register is filled in at probe time per variant. */
static struct regmap_config lpass_cpu_regmap_config = {
	.writeable_reg = lpass_cpu_regmap_writeable,
	.readable_reg = lpass_cpu_regmap_readable,
	.volatile_reg = lpass_cpu_regmap_volatile,
	.cache_type = REGCACHE_FLAT,
/*
 * Allocate regmap fields and control structures for the HDMI TX path:
 * TX control, legacy enable, VBIT control, parity, DP metadata, sstream
 * control, and per-DMA-channel MSB/LSB + DMA control fields.
 * QCOM_REGMAP_FIELD_ALLOC presumably allocates a field and bails out on
 * error — TODO confirm against the macro definition (not visible here).
 * NOTE(review): this view of the source is truncated (NULL checks and
 * returns are missing); comments only, code left untouched.
 */
static int lpass_hdmi_init_bitfields(struct device *dev, struct regmap *map)
	struct lpass_data *drvdata = dev_get_drvdata(dev);
	const struct lpass_variant *v = drvdata->variant;
	struct lpass_hdmi_tx_ctl *tx_ctl;
	struct regmap_field *legacy_en;
	struct lpass_vbit_ctrl *vbit_ctl;
	struct regmap_field *tx_parity;
	struct lpass_dp_metadata_ctl *meta_ctl;
	struct lpass_sstream_ctl *sstream_ctl;
	struct regmap_field *ch_msb;
	struct regmap_field *ch_lsb;
	struct lpass_hdmitx_dmactl *tx_dmactl;
	tx_ctl = devm_kzalloc(dev, sizeof(*tx_ctl), GFP_KERNEL);
	QCOM_REGMAP_FIELD_ALLOC(dev, map, v->soft_reset, tx_ctl->soft_reset);
	QCOM_REGMAP_FIELD_ALLOC(dev, map, v->force_reset, tx_ctl->force_reset);
	drvdata->tx_ctl = tx_ctl;
	QCOM_REGMAP_FIELD_ALLOC(dev, map, v->legacy_en, legacy_en);
	drvdata->hdmitx_legacy_en = legacy_en;
	vbit_ctl = devm_kzalloc(dev, sizeof(*vbit_ctl), GFP_KERNEL);
	QCOM_REGMAP_FIELD_ALLOC(dev, map, v->replace_vbit, vbit_ctl->replace_vbit);
	QCOM_REGMAP_FIELD_ALLOC(dev, map, v->vbit_stream, vbit_ctl->vbit_stream);
	drvdata->vbit_ctl = vbit_ctl;
	QCOM_REGMAP_FIELD_ALLOC(dev, map, v->calc_en, tx_parity);
	drvdata->hdmitx_parity_calc_en = tx_parity;
	meta_ctl = devm_kzalloc(dev, sizeof(*meta_ctl), GFP_KERNEL);
	/* bulk-allocate the 7 consecutive DP metadata fields starting at mute */
	rval = devm_regmap_field_bulk_alloc(dev, map, &meta_ctl->mute, &v->mute, 7);
	drvdata->meta_ctl = meta_ctl;
	sstream_ctl = devm_kzalloc(dev, sizeof(*sstream_ctl), GFP_KERNEL);
	/* bulk-allocate the 9 consecutive sstream control fields */
	rval = devm_regmap_field_bulk_alloc(dev, map, &sstream_ctl->sstream_en, &v->sstream_en, 9);
	drvdata->sstream_ctl = sstream_ctl;
	for (i = 0; i < LPASS_MAX_HDMI_DMA_CHANNELS; i++) {
		QCOM_REGMAP_FIELD_ALLOC(dev, map, v->msb_bits, ch_msb);
		drvdata->hdmitx_ch_msb[i] = ch_msb;
		QCOM_REGMAP_FIELD_ALLOC(dev, map, v->lsb_bits, ch_lsb);
		drvdata->hdmitx_ch_lsb[i] = ch_lsb;
		tx_dmactl = devm_kzalloc(dev, sizeof(*tx_dmactl), GFP_KERNEL);
		QCOM_REGMAP_FIELD_ALLOC(dev, map, v->use_hw_chs, tx_dmactl->use_hw_chs);
		QCOM_REGMAP_FIELD_ALLOC(dev, map, v->use_hw_usr, tx_dmactl->use_hw_usr);
		QCOM_REGMAP_FIELD_ALLOC(dev, map, v->hw_chs_sel, tx_dmactl->hw_chs_sel);
		QCOM_REGMAP_FIELD_ALLOC(dev, map, v->hw_usr_sel, tx_dmactl->hw_usr_sel);
		drvdata->hdmi_tx_dmactl[i] = tx_dmactl;
/*
 * HDMI regmap .writeable_reg: TX control/config registers, IRQ enable and
 * clear, per-channel LSB/MSB/DMA registers, and RDMA channel registers.
 * NOTE(review): this view of the source is truncated (return lines are
 * missing); comments only, code left untouched.
 */
static bool lpass_hdmi_regmap_writeable(struct device *dev, unsigned int reg)
	struct lpass_data *drvdata = dev_get_drvdata(dev);
	const struct lpass_variant *v = drvdata->variant;
	if (reg == LPASS_HDMI_TX_CTL_ADDR(v))
	if (reg == LPASS_HDMI_TX_LEGACY_ADDR(v))
	if (reg == LPASS_HDMI_TX_VBIT_CTL_ADDR(v))
	if (reg == LPASS_HDMI_TX_PARITY_ADDR(v))
	if (reg == LPASS_HDMI_TX_DP_ADDR(v))
	if (reg == LPASS_HDMI_TX_SSTREAM_ADDR(v))
	if (reg == LPASS_HDMITX_APP_IRQEN_REG(v))
	if (reg == LPASS_HDMITX_APP_IRQCLEAR_REG(v))
	for (i = 0; i < v->hdmi_rdma_channels; i++) {
		if (reg == LPASS_HDMI_TX_CH_LSB_ADDR(v, i))
		if (reg == LPASS_HDMI_TX_CH_MSB_ADDR(v, i))
		if (reg == LPASS_HDMI_TX_DMA_ADDR(v, i))
	for (i = 0; i < v->hdmi_rdma_channels; ++i) {
		if (reg == LPAIF_HDMI_RDMACTL_REG(v, i))
		if (reg == LPAIF_HDMI_RDMABASE_REG(v, i))
		if (reg == LPAIF_HDMI_RDMABUFF_REG(v, i))
		if (reg == LPAIF_HDMI_RDMAPER_REG(v, i))
/*
 * HDMI regmap .readable_reg: writeable set plus IRQSTAT and RDMACURR.
 * NOTE(review): this view of the source is truncated (return lines are
 * missing); comments only, code left untouched.
 */
static bool lpass_hdmi_regmap_readable(struct device *dev, unsigned int reg)
	struct lpass_data *drvdata = dev_get_drvdata(dev);
	const struct lpass_variant *v = drvdata->variant;
	if (reg == LPASS_HDMI_TX_CTL_ADDR(v))
	if (reg == LPASS_HDMI_TX_LEGACY_ADDR(v))
	if (reg == LPASS_HDMI_TX_VBIT_CTL_ADDR(v))
	for (i = 0; i < v->hdmi_rdma_channels; i++) {
		if (reg == LPASS_HDMI_TX_CH_LSB_ADDR(v, i))
		if (reg == LPASS_HDMI_TX_CH_MSB_ADDR(v, i))
		if (reg == LPASS_HDMI_TX_DMA_ADDR(v, i))
	if (reg == LPASS_HDMI_TX_PARITY_ADDR(v))
	if (reg == LPASS_HDMI_TX_DP_ADDR(v))
	if (reg == LPASS_HDMI_TX_SSTREAM_ADDR(v))
	if (reg == LPASS_HDMITX_APP_IRQEN_REG(v))
	if (reg == LPASS_HDMITX_APP_IRQSTAT_REG(v))
	for (i = 0; i < v->hdmi_rdma_channels; ++i) {
		if (reg == LPAIF_HDMI_RDMACTL_REG(v, i))
		if (reg == LPAIF_HDMI_RDMABASE_REG(v, i))
		if (reg == LPAIF_HDMI_RDMABUFF_REG(v, i))
		if (reg == LPAIF_HDMI_RDMAPER_REG(v, i))
		if (reg == LPAIF_HDMI_RDMACURR_REG(v, i))
/*
 * HDMI regmap .volatile_reg: registers the hardware updates on its own
 * (IRQ status, DMA current/config, channel LSB/MSB) bypass the cache.
 * NOTE(review): this view of the source is truncated (return lines are
 * missing); comments only, code left untouched.
 */
static bool lpass_hdmi_regmap_volatile(struct device *dev, unsigned int reg)
	struct lpass_data *drvdata = dev_get_drvdata(dev);
	const struct lpass_variant *v = drvdata->variant;
	if (reg == LPASS_HDMITX_APP_IRQSTAT_REG(v))
	if (reg == LPASS_HDMI_TX_LEGACY_ADDR(v))
	if (reg == LPASS_HDMI_TX_VBIT_CTL_ADDR(v))
	if (reg == LPASS_HDMI_TX_PARITY_ADDR(v))
	for (i = 0; i < v->hdmi_rdma_channels; ++i) {
		if (reg == LPAIF_HDMI_RDMACURR_REG(v, i))
		if (reg == LPASS_HDMI_TX_DMA_ADDR(v, i))
		if (reg == LPASS_HDMI_TX_CH_LSB_ADDR(v, i))
		if (reg == LPASS_HDMI_TX_CH_MSB_ADDR(v, i))
/* Non-const: .max_register is filled in at probe time per variant. */
static struct regmap_config lpass_hdmi_regmap_config = {
	.name = "lpass_hdmi",
	.writeable_reg = lpass_hdmi_regmap_writeable,
	.readable_reg = lpass_hdmi_regmap_readable,
	.volatile_reg = lpass_hdmi_regmap_volatile,
	.cache_type = REGCACHE_FLAT,
824 static bool __lpass_rxtx_regmap_accessible(struct device *dev, unsigned int reg, bool rw)
826 struct lpass_data *drvdata = dev_get_drvdata(dev);
827 const struct lpass_variant *v = drvdata->variant;
830 for (i = 0; i < v->rxtx_irq_ports; ++i) {
831 if (reg == LPAIF_RXTX_IRQCLEAR_REG(v, i))
833 if (reg == LPAIF_RXTX_IRQEN_REG(v, i))
835 if (reg == LPAIF_RXTX_IRQSTAT_REG(v, i))
839 for (i = 0; i < v->rxtx_rdma_channels; ++i) {
840 if (reg == LPAIF_CDC_RXTX_RDMACTL_REG(v, i, LPASS_CDC_DMA_RX0))
842 if (reg == LPAIF_CDC_RXTX_RDMABASE_REG(v, i, LPASS_CDC_DMA_RX0))
844 if (reg == LPAIF_CDC_RXTX_RDMABUFF_REG(v, i, LPASS_CDC_DMA_RX0))
846 if (rw == LPASS_REG_READ) {
847 if (reg == LPAIF_CDC_RXTX_RDMACURR_REG(v, i, LPASS_CDC_DMA_RX0))
850 if (reg == LPAIF_CDC_RXTX_RDMAPER_REG(v, i, LPASS_CDC_DMA_RX0))
852 if (reg == LPAIF_CDC_RXTX_RDMA_INTF_REG(v, i, LPASS_CDC_DMA_RX0))
856 for (i = 0; i < v->rxtx_wrdma_channels; ++i) {
857 if (reg == LPAIF_CDC_RXTX_WRDMACTL_REG(v, i + v->rxtx_wrdma_channel_start,
860 if (reg == LPAIF_CDC_RXTX_WRDMABASE_REG(v, i + v->rxtx_wrdma_channel_start,
863 if (reg == LPAIF_CDC_RXTX_WRDMABUFF_REG(v, i + v->rxtx_wrdma_channel_start,
866 if (rw == LPASS_REG_READ) {
867 if (reg == LPAIF_CDC_RXTX_WRDMACURR_REG(v, i, LPASS_CDC_DMA_RX0))
870 if (reg == LPAIF_CDC_RXTX_WRDMAPER_REG(v, i + v->rxtx_wrdma_channel_start,
873 if (reg == LPAIF_CDC_RXTX_WRDMA_INTF_REG(v, i + v->rxtx_wrdma_channel_start,
/* regmap .writeable_reg thunk for the RX/TX codec-DMA block. */
static bool lpass_rxtx_regmap_writeable(struct device *dev, unsigned int reg)
	return __lpass_rxtx_regmap_accessible(dev, reg, LPASS_REG_WRITE);
/* regmap .readable_reg thunk for the RX/TX codec-DMA block. */
static bool lpass_rxtx_regmap_readable(struct device *dev, unsigned int reg)
	return __lpass_rxtx_regmap_accessible(dev, reg, LPASS_REG_READ);
/*
 * RX/TX codec-DMA .volatile_reg: IRQ clear/status and DMA current-address
 * registers must bypass the regcache.
 * NOTE(review): this view of the source is truncated (return lines are
 * missing); comments only, code left untouched.
 */
static bool lpass_rxtx_regmap_volatile(struct device *dev, unsigned int reg)
	struct lpass_data *drvdata = dev_get_drvdata(dev);
	const struct lpass_variant *v = drvdata->variant;
	for (i = 0; i < v->rxtx_irq_ports; ++i) {
		if (reg == LPAIF_RXTX_IRQCLEAR_REG(v, i))
		if (reg == LPAIF_RXTX_IRQSTAT_REG(v, i))
	for (i = 0; i < v->rxtx_rdma_channels; ++i)
		if (reg == LPAIF_CDC_RXTX_RDMACURR_REG(v, i, LPASS_CDC_DMA_RX0))
	for (i = 0; i < v->rxtx_wrdma_channels; ++i)
		if (reg == LPAIF_CDC_RXTX_WRDMACURR_REG(v, i + v->rxtx_wrdma_channel_start,
/*
 * Shared readable/writeable predicate for the VA codec-DMA regmap:
 * IRQ registers plus WRDMA channel registers (offset by
 * va_wrdma_channel_start); WRDMACURR is read-only (rw == LPASS_REG_READ).
 * NOTE(review): this view of the source is truncated (return lines are
 * missing); comments only, code left untouched.
 */
static bool __lpass_va_regmap_accessible(struct device *dev, unsigned int reg, bool rw)
	struct lpass_data *drvdata = dev_get_drvdata(dev);
	const struct lpass_variant *v = drvdata->variant;
	for (i = 0; i < v->va_irq_ports; ++i) {
		if (reg == LPAIF_VA_IRQCLEAR_REG(v, i))
		if (reg == LPAIF_VA_IRQEN_REG(v, i))
		if (reg == LPAIF_VA_IRQSTAT_REG(v, i))
	for (i = 0; i < v->va_wrdma_channels; ++i) {
		if (reg == LPAIF_CDC_VA_WRDMACTL_REG(v, i + v->va_wrdma_channel_start,
					LPASS_CDC_DMA_VA_TX0))
		if (reg == LPAIF_CDC_VA_WRDMABASE_REG(v, i + v->va_wrdma_channel_start,
					LPASS_CDC_DMA_VA_TX0))
		if (reg == LPAIF_CDC_VA_WRDMABUFF_REG(v, i + v->va_wrdma_channel_start,
					LPASS_CDC_DMA_VA_TX0))
		if (rw == LPASS_REG_READ) {
			if (reg == LPAIF_CDC_VA_WRDMACURR_REG(v, i + v->va_wrdma_channel_start,
					LPASS_CDC_DMA_VA_TX0))
		if (reg == LPAIF_CDC_VA_WRDMAPER_REG(v, i + v->va_wrdma_channel_start,
					LPASS_CDC_DMA_VA_TX0))
		if (reg == LPAIF_CDC_VA_WRDMA_INTF_REG(v, i + v->va_wrdma_channel_start,
					LPASS_CDC_DMA_VA_TX0))
/* regmap .writeable_reg thunk for the VA codec-DMA block. */
static bool lpass_va_regmap_writeable(struct device *dev, unsigned int reg)
	return __lpass_va_regmap_accessible(dev, reg, LPASS_REG_WRITE);
/* regmap .readable_reg thunk for the VA codec-DMA block. */
static bool lpass_va_regmap_readable(struct device *dev, unsigned int reg)
	return __lpass_va_regmap_accessible(dev, reg, LPASS_REG_READ);
/*
 * VA codec-DMA .volatile_reg: IRQ clear/status and WRDMA current-address
 * registers must bypass the regcache.
 * NOTE(review): this view of the source is truncated (return lines are
 * missing); comments only, code left untouched.
 */
static bool lpass_va_regmap_volatile(struct device *dev, unsigned int reg)
	struct lpass_data *drvdata = dev_get_drvdata(dev);
	const struct lpass_variant *v = drvdata->variant;
	for (i = 0; i < v->va_irq_ports; ++i) {
		if (reg == LPAIF_VA_IRQCLEAR_REG(v, i))
		if (reg == LPAIF_VA_IRQSTAT_REG(v, i))
	for (i = 0; i < v->va_wrdma_channels; ++i) {
		if (reg == LPAIF_CDC_VA_WRDMACURR_REG(v, i + v->va_wrdma_channel_start,
					LPASS_CDC_DMA_VA_TX0))
/* Non-const: .max_register is filled in at probe time per variant. */
static struct regmap_config lpass_rxtx_regmap_config = {
	.writeable_reg = lpass_rxtx_regmap_writeable,
	.readable_reg = lpass_rxtx_regmap_readable,
	.volatile_reg = lpass_rxtx_regmap_volatile,
	.cache_type = REGCACHE_FLAT,
/* Non-const: .max_register is filled in at probe time per variant. */
static struct regmap_config lpass_va_regmap_config = {
	.writeable_reg = lpass_va_regmap_writeable,
	.readable_reg = lpass_va_regmap_readable,
	.volatile_reg = lpass_va_regmap_volatile,
	.cache_type = REGCACHE_FLAT,
1007 static unsigned int of_lpass_cpu_parse_sd_lines(struct device *dev,
1008 struct device_node *node,
1011 unsigned int lines[LPASS_CPU_MAX_MI2S_LINES];
1012 unsigned int sd_line_mask = 0;
1015 num_lines = of_property_read_variable_u32_array(node, name, lines, 0,
1016 LPASS_CPU_MAX_MI2S_LINES);
1018 return LPAIF_I2SCTL_MODE_NONE;
1020 for (i = 0; i < num_lines; i++)
1021 sd_line_mask |= BIT(lines[i]);
1023 switch (sd_line_mask) {
1024 case LPASS_CPU_I2S_SD0_MASK:
1025 return LPAIF_I2SCTL_MODE_SD0;
1026 case LPASS_CPU_I2S_SD1_MASK:
1027 return LPAIF_I2SCTL_MODE_SD1;
1028 case LPASS_CPU_I2S_SD2_MASK:
1029 return LPAIF_I2SCTL_MODE_SD2;
1030 case LPASS_CPU_I2S_SD3_MASK:
1031 return LPAIF_I2SCTL_MODE_SD3;
1032 case LPASS_CPU_I2S_SD0_1_MASK:
1033 return LPAIF_I2SCTL_MODE_QUAD01;
1034 case LPASS_CPU_I2S_SD2_3_MASK:
1035 return LPAIF_I2SCTL_MODE_QUAD23;
1036 case LPASS_CPU_I2S_SD0_1_2_MASK:
1037 return LPAIF_I2SCTL_MODE_6CH;
1038 case LPASS_CPU_I2S_SD0_1_2_3_MASK:
1039 return LPAIF_I2SCTL_MODE_8CH;
1041 dev_err(dev, "Unsupported SD line mask: %#x\n", sd_line_mask);
1042 return LPAIF_I2SCTL_MODE_NONE;
/*
 * Walk the child DT nodes and fill per-DAI SD-line modes; flag HDMI/DP
 * and codec-DMA ports. Defaults every DAI to 8CH for backwards
 * compatibility before parsing.
 * NOTE(review): this view of the source is truncated (braces/continue
 * lines are missing); comments only, code left untouched.
 */
static void of_lpass_cpu_parse_dai_data(struct device *dev,
					struct lpass_data *data)
	struct device_node *node;
	/* Allow all channels by default for backwards compatibility */
	for (i = 0; i < data->variant->num_dai; i++) {
		id = data->variant->dai_driver[i].id;
		data->mi2s_playback_sd_mode[id] = LPAIF_I2SCTL_MODE_8CH;
		data->mi2s_capture_sd_mode[id] = LPAIF_I2SCTL_MODE_8CH;
	for_each_child_of_node(dev->of_node, node) {
		ret = of_property_read_u32(node, "reg", &id);
		if (ret || id < 0) {
			dev_err(dev, "valid dai id not found: %d\n", ret);
		if (id == LPASS_DP_RX) {
			data->hdmi_port_enable = 1;
		} else if (is_cdc_dma_port(id)) {
			data->codec_dma_enable = 1;
			/* MI2S port: read the DT-selected SD lines per direction */
			data->mi2s_playback_sd_mode[id] =
				of_lpass_cpu_parse_sd_lines(dev, node,
							    "qcom,playback-sd-lines");
			data->mi2s_capture_sd_mode[id] =
				of_lpass_cpu_parse_sd_lines(dev, node,
							    "qcom,capture-sd-lines");
1080 static int of_lpass_cdc_dma_clks_parse(struct device *dev,
1081 struct lpass_data *data)
1083 data->codec_mem0 = devm_clk_get(dev, "audio_cc_codec_mem0");
1084 if (IS_ERR(data->codec_mem0))
1085 return PTR_ERR(data->codec_mem0);
1087 data->codec_mem1 = devm_clk_get(dev, "audio_cc_codec_mem1");
1088 if (IS_ERR(data->codec_mem1))
1089 return PTR_ERR(data->codec_mem1);
1091 data->codec_mem2 = devm_clk_get(dev, "audio_cc_codec_mem2");
1092 if (IS_ERR(data->codec_mem2))
1093 return PTR_ERR(data->codec_mem2);
1095 data->va_mem0 = devm_clk_get(dev, "aon_cc_va_mem0");
1096 if (IS_ERR(data->va_mem0))
1097 return PTR_ERR(data->va_mem0);
/*
 * Platform probe: bail out if a DSP owns the hardware, map and regmap the
 * LPAIF (plus optional codec-DMA and HDMI blocks), acquire per-DAI clocks,
 * initialize I2SCTL/HDMI bitfields, then register the CPU component and
 * the platform (PCM) driver.
 * NOTE(review): this view of the source is truncated (many braces,
 * returns and some declarations are missing); comments only, code left
 * untouched.
 */
int asoc_qcom_lpass_cpu_platform_probe(struct platform_device *pdev)
	struct lpass_data *drvdata;
	struct device_node *dsp_of_node;
	struct resource *res;
	const struct lpass_variant *variant;
	struct device *dev = &pdev->dev;
	/* a "qcom,adsp" phandle means the DSP owns the audio hardware */
	dsp_of_node = of_parse_phandle(pdev->dev.of_node, "qcom,adsp", 0);
		dev_err(dev, "DSP exists and holds audio resources\n");
		of_node_put(dsp_of_node);
	drvdata = devm_kzalloc(dev, sizeof(struct lpass_data), GFP_KERNEL);
	platform_set_drvdata(pdev, drvdata);
	variant = device_get_match_data(dev);
	if (of_device_is_compatible(dev->of_node, "qcom,lpass-cpu-apq8016"))
		dev_warn(dev, "qcom,lpass-cpu-apq8016 compatible is deprecated\n");
	drvdata->variant = variant;
	of_lpass_cpu_parse_dai_data(dev, drvdata);
	/* optional codec-DMA block: RX/TX and VA LPAIF regions + regmaps */
	if (drvdata->codec_dma_enable) {
		drvdata->rxtx_lpaif =
				devm_platform_ioremap_resource_byname(pdev, "lpass-rxtx-lpaif");
		if (IS_ERR(drvdata->rxtx_lpaif))
			return PTR_ERR(drvdata->rxtx_lpaif);
		drvdata->va_lpaif = devm_platform_ioremap_resource_byname(pdev, "lpass-va-lpaif");
		if (IS_ERR(drvdata->va_lpaif))
			return PTR_ERR(drvdata->va_lpaif);
		lpass_rxtx_regmap_config.max_register = LPAIF_CDC_RXTX_WRDMAPER_REG(variant,
					variant->rxtx_wrdma_channels +
					variant->rxtx_wrdma_channel_start, LPASS_CDC_DMA_TX3);
		drvdata->rxtx_lpaif_map = devm_regmap_init_mmio(dev, drvdata->rxtx_lpaif,
					&lpass_rxtx_regmap_config);
		if (IS_ERR(drvdata->rxtx_lpaif_map))
			return PTR_ERR(drvdata->rxtx_lpaif_map);
		lpass_va_regmap_config.max_register = LPAIF_CDC_VA_WRDMAPER_REG(variant,
					variant->va_wrdma_channels +
					variant->va_wrdma_channel_start, LPASS_CDC_DMA_VA_TX0);
		drvdata->va_lpaif_map = devm_regmap_init_mmio(dev, drvdata->va_lpaif,
					&lpass_va_regmap_config);
		if (IS_ERR(drvdata->va_lpaif_map))
			return PTR_ERR(drvdata->va_lpaif_map);
		ret = of_lpass_cdc_dma_clks_parse(dev, drvdata);
			dev_err(dev, "failed to get cdc dma clocks %d\n", ret);
		/* low-power memory buffers used as DMA targets */
		res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "lpass-rxtx-cdc-dma-lpm");
		drvdata->rxtx_cdc_dma_lpm_buf = res->start;
		res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "lpass-va-cdc-dma-lpm");
		drvdata->va_cdc_dma_lpm_buf = res->start;
	drvdata->lpaif = devm_platform_ioremap_resource_byname(pdev, "lpass-lpaif");
	if (IS_ERR(drvdata->lpaif))
		return PTR_ERR(drvdata->lpaif);
	lpass_cpu_regmap_config.max_register = LPAIF_WRDMAPER_REG(variant,
						variant->wrdma_channels +
						variant->wrdma_channel_start);
	drvdata->lpaif_map = devm_regmap_init_mmio(dev, drvdata->lpaif,
			&lpass_cpu_regmap_config);
	if (IS_ERR(drvdata->lpaif_map)) {
		dev_err(dev, "error initializing regmap: %ld\n",
			PTR_ERR(drvdata->lpaif_map));
		return PTR_ERR(drvdata->lpaif_map);
	/* optional HDMI block discovered by of_lpass_cpu_parse_dai_data() */
	if (drvdata->hdmi_port_enable) {
		drvdata->hdmiif = devm_platform_ioremap_resource_byname(pdev, "lpass-hdmiif");
		if (IS_ERR(drvdata->hdmiif))
			return PTR_ERR(drvdata->hdmiif);
		lpass_hdmi_regmap_config.max_register = LPAIF_HDMI_RDMAPER_REG(variant,
					variant->hdmi_rdma_channels - 1);
		drvdata->hdmiif_map = devm_regmap_init_mmio(dev, drvdata->hdmiif,
					&lpass_hdmi_regmap_config);
		if (IS_ERR(drvdata->hdmiif_map)) {
			dev_err(dev, "error initializing regmap: %ld\n",
				PTR_ERR(drvdata->hdmiif_map));
			return PTR_ERR(drvdata->hdmiif_map);
	if (variant->init) {
		ret = variant->init(pdev);
			dev_err(dev, "error initializing variant: %d\n", ret);
	/* per-DAI clocks; DP/codec-DMA ports have no MI2S clocks */
	for (i = 0; i < variant->num_dai; i++) {
		dai_id = variant->dai_driver[i].id;
		if (dai_id == LPASS_DP_RX || is_cdc_dma_port(dai_id))
		drvdata->mi2s_osr_clk[dai_id] = devm_clk_get_optional(dev,
					variant->dai_osr_clk_names[i]);
		drvdata->mi2s_bit_clk[dai_id] = devm_clk_get(dev,
						variant->dai_bit_clk_names[i]);
		if (IS_ERR(drvdata->mi2s_bit_clk[dai_id])) {
				"error getting %s: %ld\n",
				variant->dai_bit_clk_names[i],
				PTR_ERR(drvdata->mi2s_bit_clk[dai_id]));
			return PTR_ERR(drvdata->mi2s_bit_clk[dai_id]);
		/* QUAD01 playback ports are fixed at exactly 4 channels */
		if (drvdata->mi2s_playback_sd_mode[dai_id] ==
			LPAIF_I2SCTL_MODE_QUAD01) {
			variant->dai_driver[dai_id].playback.channels_min = 4;
			variant->dai_driver[dai_id].playback.channels_max = 4;
	/* Allocation for i2sctl regmap fields */
	drvdata->i2sctl = devm_kzalloc(&pdev->dev, sizeof(struct lpaif_i2sctl),
	/* Initialize bitfields for dai I2SCTL register */
	ret = lpass_cpu_init_i2sctl_bitfields(dev, drvdata->i2sctl,
					      drvdata->lpaif_map);
		dev_err(dev, "error init i2sctl field: %d\n", ret);
	if (drvdata->hdmi_port_enable) {
		ret = lpass_hdmi_init_bitfields(dev, drvdata->hdmiif_map);
			dev_err(dev, "%s error hdmi init failed\n", __func__);
	ret = devm_snd_soc_register_component(dev,
					      &lpass_cpu_comp_driver,
					      variant->dai_driver,
		dev_err(dev, "error registering cpu driver: %d\n", ret);
	ret = asoc_qcom_lpass_platform_register(pdev);
		dev_err(dev, "error registering platform driver: %d\n", ret);
/* Platform remove: run the variant's teardown hook, if any. */
void asoc_qcom_lpass_cpu_platform_remove(struct platform_device *pdev)
	struct lpass_data *drvdata = platform_get_drvdata(pdev);
	if (drvdata->variant->exit)
		drvdata->variant->exit(pdev);
EXPORT_SYMBOL_GPL(asoc_qcom_lpass_cpu_platform_remove);
/* Platform shutdown: same variant teardown as remove (quiesce hardware). */
void asoc_qcom_lpass_cpu_platform_shutdown(struct platform_device *pdev)
	struct lpass_data *drvdata = platform_get_drvdata(pdev);
	if (drvdata->variant->exit)
		drvdata->variant->exit(pdev);
EXPORT_SYMBOL_GPL(asoc_qcom_lpass_cpu_platform_shutdown);
1296 MODULE_DESCRIPTION("QTi LPASS CPU Driver");
1297 MODULE_LICENSE("GPL v2");