hc
2024-01-03 2f7c68cb55ecb7331f2381deb497c27155f32faf
kernel/sound/soc/rockchip/rockchip_dlp.c
@@ -1,4 +1,4 @@
1
-// SPDX-License-Identifier: GPL-2.0
1
+// SPDX-License-Identifier: GPL-2.0-or-later
22 /*
33 * Rockchip DLP (Digital Loopback) Driver
44 *
@@ -11,28 +11,15 @@
1111 #include <linux/module.h>
1212 #include <linux/init.h>
1313 #include <linux/dmaengine.h>
14
+#include <linux/dma-mapping.h>
1415 #include <linux/slab.h>
1516 #include <sound/pcm.h>
1617 #include <sound/pcm_params.h>
1718 #include <sound/soc.h>
18
-#include <linux/dma-mapping.h>
19
-#include <linux/of.h>
2019
21
-#include <sound/dmaengine_pcm.h>
2220 #include "rockchip_dlp.h"
2321
24
-#ifdef DLP_DBG
25
-#define dlp_info(args...) pr_info(args)
26
-#else
27
-#define dlp_info(args...) no_printk(args)
28
-#endif
29
-
30
-#define SND_DMAENGINE_DLP_DRV_NAME "snd_dmaengine_dlp"
3122 #define PBUF_CNT 2
32
-
33
-static unsigned int prealloc_buffer_size_kbytes = 512;
34
-module_param(prealloc_buffer_size_kbytes, uint, 0444);
35
-MODULE_PARM_DESC(prealloc_buffer_size_kbytes, "Preallocate DMA buffer size (KB).");
3623
3724 /* MUST: dlp_text should be match to enum dlp_mode */
3825 static const char *const dlp_text[] = {
@@ -55,74 +42,43 @@
5542 "16CH: 8 Mics + 8 Loopbacks",
5643 };
5744
58
-enum dlp_mode {
59
- DLP_MODE_DISABLED,
60
- DLP_MODE_2CH_1LP_1MIC, /* replace cap-ch-0 with play-ch-0 */
61
- DLP_MODE_2CH_1MIC_1LP, /* replace cap-ch-1 with play-ch-1 */
62
- DLP_MODE_2CH_1MIC_1LP_MIX, /* replace cap-ch-1 with play-ch-all-mix */
63
- DLP_MODE_2CH_2LP, /* replace cap-ch-0~1 with play-ch-0~1 */
64
- DLP_MODE_4CH_2MIC_2LP, /* replace cap-ch-2~3 with play-ch-0~1 */
65
- DLP_MODE_4CH_2MIC_1LP_MIX, /* replace cap-ch-3 with play-ch-all-mix */
66
- DLP_MODE_4CH_4LP, /* replace cap-ch-0~3 with play-ch-0~3 */
67
- DLP_MODE_6CH_4MIC_2LP, /* replace cap-ch-4~5 with play-ch-0~1 */
68
- DLP_MODE_6CH_4MIC_1LP_MIX, /* replace cap-ch-4 with play-ch-all-mix */
69
- DLP_MODE_6CH_6LP, /* replace cap-ch-0~5 with play-ch-0~5 */
70
- DLP_MODE_8CH_6MIC_2LP, /* replace cap-ch-6~7 with play-ch-0~1 */
71
- DLP_MODE_8CH_6MIC_1LP_MIX, /* replace cap-ch-6 with play-ch-all-mix */
72
- DLP_MODE_8CH_8LP, /* replace cap-ch-0~7 with play-ch-0~7 */
73
- DLP_MODE_10CH_8MIC_2LP, /* replace cap-ch-8~9 with play-ch-0~1 */
74
- DLP_MODE_10CH_8MIC_1LP_MIX, /* replace cap-ch-8 with play-ch-all-mix */
75
- DLP_MODE_16CH_8MIC_8LP, /* replace cap-ch-8~f with play-ch-8~f */
76
-};
77
-
78
-struct dmaengine_dlp_runtime_data;
79
-struct dmaengine_dlp {
80
- struct device *dev;
81
- struct dma_chan *chan[SNDRV_PCM_STREAM_LAST + 1];
82
- const struct snd_dlp_config *config;
83
- struct snd_soc_component component;
84
- struct list_head ref_list;
85
- enum dlp_mode mode;
86
- struct dmaengine_dlp_runtime_data *pref;
87
- spinlock_t lock;
88
- spinlock_t pref_lock;
89
-};
90
-
91
-struct dmaengine_dlp_runtime_data {
92
- struct dmaengine_dlp *parent;
93
- struct dmaengine_dlp_runtime_data *ref;
94
- struct dma_chan *dma_chan;
95
- struct kref refcount;
96
- struct list_head node;
97
- dma_cookie_t cookie;
98
-
99
- char *buf;
100
- snd_pcm_uframes_t buf_sz;
101
- snd_pcm_uframes_t period_sz;
102
- snd_pcm_uframes_t hw_ptr;
103
- snd_pcm_sframes_t hw_ptr_delta; /* play-ptr - cap-ptr */
104
- unsigned long period_elapsed;
105
- unsigned int frame_bytes;
106
- unsigned int channels;
107
- unsigned int buf_ofs;
108
- int stream;
109
-};
110
-
111
-static inline void dlp_activate(struct dmaengine_dlp *dlp)
45
+static inline void drd_buf_free(struct dlp_runtime_data *drd)
11246 {
113
- spin_lock(&dlp->lock);
114
- dlp->component.active++;
115
- spin_unlock(&dlp->lock);
47
+ if (drd && drd->buf) {
48
+ dev_dbg(drd->parent->dev, "%s: stream[%d]: 0x%px\n",
49
+ __func__, drd->stream, drd->buf);
50
+ kvfree(drd->buf);
51
+ drd->buf = NULL;
52
+ }
11653 }
11754
118
-static inline void dlp_deactivate(struct dmaengine_dlp *dlp)
55
+static inline int drd_buf_alloc(struct dlp_runtime_data *drd, int size)
11956 {
120
- spin_lock(&dlp->lock);
121
- dlp->component.active--;
122
- spin_unlock(&dlp->lock);
57
+ if (drd) {
58
+ if (snd_BUG_ON(drd->buf))
59
+ return -EINVAL;
60
+
61
+ drd->buf = kvzalloc(size, GFP_KERNEL);
62
+ if (!drd->buf)
63
+ return -ENOMEM;
64
+ dev_dbg(drd->parent->dev, "%s: stream[%d]: 0x%px\n",
65
+ __func__, drd->stream, drd->buf);
66
+ }
67
+
68
+ return 0;
12369 }
12470
125
-static inline bool dlp_mode_channels_match(struct dmaengine_dlp *dlp,
71
+static inline void dlp_activate(struct dlp *dlp)
72
+{
73
+ atomic_inc(&dlp->active);
74
+}
75
+
76
+static inline void dlp_deactivate(struct dlp *dlp)
77
+{
78
+ atomic_dec(&dlp->active);
79
+}
80
+
81
+static inline bool dlp_mode_channels_match(struct dlp *dlp,
12682 int ch, int *expected)
12783 {
12884 *expected = 0;
@@ -163,183 +119,135 @@
163119 }
164120 }
165121
166
-static inline ssize_t dlp_channels_to_bytes(struct dmaengine_dlp_runtime_data *prtd,
167
- int channels)
168
-{
169
- return (prtd->frame_bytes / prtd->channels) * channels;
170
-}
171
-
172
-static inline ssize_t dlp_frames_to_bytes(struct dmaengine_dlp_runtime_data *prtd,
173
- snd_pcm_sframes_t size)
174
-{
175
- return size * prtd->frame_bytes;
176
-}
177
-
178
-static inline snd_pcm_sframes_t dlp_bytes_to_frames(struct dmaengine_dlp_runtime_data *prtd,
179
- ssize_t size)
180
-{
181
- return size / prtd->frame_bytes;
182
-}
183
-
184
-static inline struct dmaengine_dlp *soc_component_to_dlp(struct snd_soc_component *p)
185
-{
186
- return container_of(p, struct dmaengine_dlp, component);
187
-}
188
-
189
-static inline struct dmaengine_dlp_runtime_data *substream_to_prtd(
190
- const struct snd_pcm_substream *substream)
191
-{
192
- if (!substream->runtime)
193
- return NULL;
194
-
195
- return substream->runtime->private_data;
196
-}
197
-
198
-static struct dma_chan *snd_dmaengine_dlp_get_chan(struct snd_pcm_substream *substream)
199
-{
200
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
201
-
202
- return prtd->dma_chan;
203
-}
204
-
205
-static struct device *dmaengine_dma_dev(struct dmaengine_dlp *dlp,
206
- struct snd_pcm_substream *substream)
207
-{
208
- if (!dlp->chan[substream->stream])
209
- return NULL;
210
-
211
- return dlp->chan[substream->stream]->device->dev;
212
-}
213
-
214
-static int dlp_get_offset_size(struct dmaengine_dlp_runtime_data *prtd,
122
+static int dlp_get_offset_size(struct dlp_runtime_data *drd,
215123 enum dlp_mode mode, int *ofs, int *size, bool *mix)
216124 {
217
- bool is_playback = prtd->stream == SNDRV_PCM_STREAM_PLAYBACK;
125
+ bool is_playback = drd->stream == SNDRV_PCM_STREAM_PLAYBACK;
218126 int ret = 0;
219127
220128 switch (mode) {
221129 case DLP_MODE_2CH_1LP_1MIC:
222130 *ofs = 0;
223
- *size = dlp_channels_to_bytes(prtd, 1);
131
+ *size = dlp_channels_to_bytes(drd, 1);
224132 break;
225133 case DLP_MODE_2CH_1MIC_1LP:
226
- *ofs = dlp_channels_to_bytes(prtd, 1);
227
- *size = dlp_channels_to_bytes(prtd, 1);
134
+ *ofs = dlp_channels_to_bytes(drd, 1);
135
+ *size = dlp_channels_to_bytes(drd, 1);
228136 break;
229137 case DLP_MODE_2CH_1MIC_1LP_MIX:
230138 if (is_playback) {
231139 *ofs = 0;
232
- *size = dlp_frames_to_bytes(prtd, 1);
140
+ *size = dlp_frames_to_bytes(drd, 1);
233141 if (mix)
234142 *mix = true;
235143 } else {
236
- *ofs = dlp_channels_to_bytes(prtd, 1);
237
- *size = dlp_channels_to_bytes(prtd, 1);
144
+ *ofs = dlp_channels_to_bytes(drd, 1);
145
+ *size = dlp_channels_to_bytes(drd, 1);
238146 }
239147 break;
240148 case DLP_MODE_2CH_2LP:
241149 *ofs = 0;
242
- *size = dlp_channels_to_bytes(prtd, 2);
150
+ *size = dlp_channels_to_bytes(drd, 2);
243151 break;
244152 case DLP_MODE_4CH_2MIC_2LP:
245153 if (is_playback) {
246154 *ofs = 0;
247
- *size = dlp_channels_to_bytes(prtd, 2);
155
+ *size = dlp_channels_to_bytes(drd, 2);
248156 } else {
249
- *ofs = dlp_channels_to_bytes(prtd, 2);
250
- *size = dlp_channels_to_bytes(prtd, 2);
157
+ *ofs = dlp_channels_to_bytes(drd, 2);
158
+ *size = dlp_channels_to_bytes(drd, 2);
251159 }
252160 break;
253161 case DLP_MODE_4CH_2MIC_1LP_MIX:
254162 if (is_playback) {
255163 *ofs = 0;
256
- *size = dlp_frames_to_bytes(prtd, 1);
164
+ *size = dlp_frames_to_bytes(drd, 1);
257165 if (mix)
258166 *mix = true;
259167 } else {
260
- *ofs = dlp_channels_to_bytes(prtd, 2);
261
- *size = dlp_channels_to_bytes(prtd, 1);
168
+ *ofs = dlp_channels_to_bytes(drd, 2);
169
+ *size = dlp_channels_to_bytes(drd, 1);
262170 }
263171 break;
264172 case DLP_MODE_4CH_4LP:
265173 *ofs = 0;
266
- *size = dlp_channels_to_bytes(prtd, 4);
174
+ *size = dlp_channels_to_bytes(drd, 4);
267175 break;
268176 case DLP_MODE_6CH_4MIC_2LP:
269177 if (is_playback) {
270178 *ofs = 0;
271
- *size = dlp_channels_to_bytes(prtd, 2);
179
+ *size = dlp_channels_to_bytes(drd, 2);
272180 } else {
273
- *ofs = dlp_channels_to_bytes(prtd, 4);
274
- *size = dlp_channels_to_bytes(prtd, 2);
181
+ *ofs = dlp_channels_to_bytes(drd, 4);
182
+ *size = dlp_channels_to_bytes(drd, 2);
275183 }
276184 break;
277185 case DLP_MODE_6CH_4MIC_1LP_MIX:
278186 if (is_playback) {
279187 *ofs = 0;
280
- *size = dlp_frames_to_bytes(prtd, 1);
188
+ *size = dlp_frames_to_bytes(drd, 1);
281189 if (mix)
282190 *mix = true;
283191 } else {
284
- *ofs = dlp_channels_to_bytes(prtd, 4);
285
- *size = dlp_channels_to_bytes(prtd, 1);
192
+ *ofs = dlp_channels_to_bytes(drd, 4);
193
+ *size = dlp_channels_to_bytes(drd, 1);
286194 }
287195 break;
288196 case DLP_MODE_6CH_6LP:
289197 *ofs = 0;
290
- *size = dlp_channels_to_bytes(prtd, 6);
198
+ *size = dlp_channels_to_bytes(drd, 6);
291199 break;
292200 case DLP_MODE_8CH_6MIC_2LP:
293201 if (is_playback) {
294202 *ofs = 0;
295
- *size = dlp_channels_to_bytes(prtd, 2);
203
+ *size = dlp_channels_to_bytes(drd, 2);
296204 } else {
297
- *ofs = dlp_channels_to_bytes(prtd, 6);
298
- *size = dlp_channels_to_bytes(prtd, 2);
205
+ *ofs = dlp_channels_to_bytes(drd, 6);
206
+ *size = dlp_channels_to_bytes(drd, 2);
299207 }
300208 break;
301209 case DLP_MODE_8CH_6MIC_1LP_MIX:
302210 if (is_playback) {
303211 *ofs = 0;
304
- *size = dlp_frames_to_bytes(prtd, 1);
212
+ *size = dlp_frames_to_bytes(drd, 1);
305213 if (mix)
306214 *mix = true;
307215 } else {
308
- *ofs = dlp_channels_to_bytes(prtd, 6);
309
- *size = dlp_channels_to_bytes(prtd, 1);
216
+ *ofs = dlp_channels_to_bytes(drd, 6);
217
+ *size = dlp_channels_to_bytes(drd, 1);
310218 }
311219 break;
312220 case DLP_MODE_8CH_8LP:
313221 *ofs = 0;
314
- *size = dlp_channels_to_bytes(prtd, 8);
222
+ *size = dlp_channels_to_bytes(drd, 8);
315223 break;
316224 case DLP_MODE_10CH_8MIC_2LP:
317225 if (is_playback) {
318226 *ofs = 0;
319
- *size = dlp_channels_to_bytes(prtd, 2);
227
+ *size = dlp_channels_to_bytes(drd, 2);
320228 } else {
321
- *ofs = dlp_channels_to_bytes(prtd, 8);
322
- *size = dlp_channels_to_bytes(prtd, 2);
229
+ *ofs = dlp_channels_to_bytes(drd, 8);
230
+ *size = dlp_channels_to_bytes(drd, 2);
323231 }
324232 break;
325233 case DLP_MODE_10CH_8MIC_1LP_MIX:
326234 if (is_playback) {
327235 *ofs = 0;
328
- *size = dlp_frames_to_bytes(prtd, 1);
236
+ *size = dlp_frames_to_bytes(drd, 1);
329237 if (mix)
330238 *mix = true;
331239 } else {
332
- *ofs = dlp_channels_to_bytes(prtd, 8);
333
- *size = dlp_channels_to_bytes(prtd, 1);
240
+ *ofs = dlp_channels_to_bytes(drd, 8);
241
+ *size = dlp_channels_to_bytes(drd, 1);
334242 }
335243 break;
336244 case DLP_MODE_16CH_8MIC_8LP:
337245 if (is_playback) {
338246 *ofs = 0;
339
- *size = dlp_channels_to_bytes(prtd, 8);
247
+ *size = dlp_channels_to_bytes(drd, 8);
340248 } else {
341
- *ofs = dlp_channels_to_bytes(prtd, 8);
342
- *size = dlp_channels_to_bytes(prtd, 8);
249
+ *ofs = dlp_channels_to_bytes(drd, 8);
250
+ *size = dlp_channels_to_bytes(drd, 8);
343251 }
344252 break;
345253 default:
@@ -353,22 +261,22 @@
353261 return ret;
354262 }
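A worked example of the mapping above (illustration only, not part of the patch): with S16_LE and 4 channels, frame_bytes is 8, so one channel is 2 bytes.

	/*
	 * DLP_MODE_4CH_2MIC_2LP, S16_LE, 4ch (frame_bytes = 8):
	 *
	 *   capture : *ofs = 4 bytes (skip mic ch0..1), *size = 4 bytes (ch2..3)
	 *   playback: *ofs = 0,                         *size = 4 bytes (ch0..1)
	 *
	 * i.e. capture channels 2..3 are later overwritten with playback
	 * channels 0..1 by process_capture().
	 */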
355263
356
-static int dlp_mix_frame_buffer(struct dmaengine_dlp_runtime_data *prtd, void *buf)
264
+static int dlp_mix_frame_buffer(struct dlp_runtime_data *drd, void *buf)
357265 {
358
- int sample_bytes = dlp_channels_to_bytes(prtd, 1);
266
+ int sample_bytes = dlp_channels_to_bytes(drd, 1);
359267 int16_t *p16 = (int16_t *)buf, v16 = 0;
360268 int32_t *p32 = (int32_t *)buf, v32 = 0;
361269 int i = 0;
362270
363271 switch (sample_bytes) {
364272 case 2:
365
- for (i = 0; i < prtd->channels; i++)
366
- v16 += (p16[i] / prtd->channels);
273
+ for (i = 0; i < drd->channels; i++)
274
+ v16 += (p16[i] / drd->channels);
367275 p16[0] = v16;
368276 break;
369277 case 4:
370
- for (i = 0; i < prtd->channels; i++)
371
- v32 += (p32[i] / prtd->channels);
278
+ for (i = 0; i < drd->channels; i++)
279
+ v32 += (p32[i] / drd->channels);
372280 p32[0] = v32;
373281 break;
374282 default:
@@ -378,17 +286,209 @@
378286 return 0;
379287 }
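The divide-before-sum in dlp_mix_frame_buffer() keeps the accumulator inside the sample range; a quick worked example (illustration only):

	/*
	 * One S16_LE frame with 4 channels: { 1000, 2000, 3000, 4000 }
	 *   ch0 = 1000/4 + 2000/4 + 3000/4 + 4000/4 = 2500
	 * Only channel 0 of the frame is rewritten; the mix is applied in
	 * place on the playback shadow buffer before that single channel is
	 * copied into the capture loopback slot.
	 */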
380288
381
-static int dmaengine_dlp_hw_params(struct snd_soc_component *component,
382
- struct snd_pcm_substream *substream,
383
- struct snd_pcm_hw_params *params)
289
+static inline int drd_init_from(struct dlp_runtime_data *drd, struct dlp_runtime_data *src)
384290 {
385
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
386
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
387
- struct dma_chan *chan = snd_dmaengine_dlp_get_chan(substream);
388
- struct dma_slave_config slave_config;
291
+ memset(drd, 0x0, sizeof(*drd));
292
+
293
+ drd->parent = src->parent;
294
+ drd->buf_sz = src->buf_sz;
295
+ drd->period_sz = src->period_sz;
296
+ drd->frame_bytes = src->frame_bytes;
297
+ drd->channels = src->channels;
298
+ drd->stream = src->stream;
299
+
300
+ INIT_LIST_HEAD(&drd->node);
301
+ kref_init(&drd->refcount);
302
+
303
+ dev_dbg(drd->parent->dev, "%s: drd: 0x%px\n", __func__, drd);
304
+
305
+ return 0;
306
+}
307
+
308
+static void drd_avl_list_add(struct dlp *dlp, struct dlp_runtime_data *drd)
309
+{
310
+ unsigned long flags;
311
+
312
+ spin_lock_irqsave(&dlp->lock, flags);
313
+ list_add(&drd->node, &dlp->drd_avl_list);
314
+ dlp->drd_avl_count++;
315
+ spin_unlock_irqrestore(&dlp->lock, flags);
316
+}
317
+
318
+static struct dlp_runtime_data *drd_avl_list_get(struct dlp *dlp)
319
+{
320
+ struct dlp_runtime_data *drd = NULL;
321
+ unsigned long flags;
322
+
323
+ spin_lock_irqsave(&dlp->lock, flags);
324
+ if (!list_empty(&dlp->drd_avl_list)) {
325
+ drd = list_first_entry(&dlp->drd_avl_list, struct dlp_runtime_data, node);
326
+ list_del(&drd->node);
327
+ dlp->drd_avl_count--;
328
+ }
329
+ spin_unlock_irqrestore(&dlp->lock, flags);
330
+
331
+ return drd;
332
+}
333
+
334
+static void drd_release(struct kref *ref)
335
+{
336
+ struct dlp_runtime_data *drd =
337
+ container_of(ref, struct dlp_runtime_data, refcount);
338
+
339
+ dev_dbg(drd->parent->dev, "%s: drd: 0x%px\n", __func__, drd);
340
+
341
+ drd_buf_free(drd);
342
+ /* move to available list */
343
+ drd_avl_list_add(drd->parent, drd);
344
+}
345
+
346
+static inline struct dlp_runtime_data *drd_get(struct dlp_runtime_data *drd)
347
+{
348
+ if (!drd)
349
+ return NULL;
350
+
351
+ return kref_get_unless_zero(&drd->refcount) ? drd : NULL;
352
+}
353
+
354
+static inline void drd_put(struct dlp_runtime_data *drd)
355
+{
356
+ if (!drd)
357
+ return;
358
+
359
+ kref_put(&drd->refcount, drd_release);
360
+}
361
+
362
+static void drd_rdy_list_add(struct dlp *dlp, struct dlp_runtime_data *drd)
363
+{
364
+ unsigned long flags;
365
+
366
+ spin_lock_irqsave(&dlp->lock, flags);
367
+ list_add(&drd->node, &dlp->drd_rdy_list);
368
+ spin_unlock_irqrestore(&dlp->lock, flags);
369
+}
370
+
371
+static struct dlp_runtime_data *drd_rdy_list_get(struct dlp *dlp)
372
+{
373
+ struct dlp_runtime_data *drd = NULL;
374
+ unsigned long flags;
375
+
376
+ spin_lock_irqsave(&dlp->lock, flags);
377
+ if (!list_empty(&dlp->drd_rdy_list)) {
378
+ /* the newest one */
379
+ drd = list_first_entry(&dlp->drd_rdy_list, struct dlp_runtime_data, node);
380
+ list_del(&drd->node);
381
+ }
382
+ spin_unlock_irqrestore(&dlp->lock, flags);
383
+
384
+ return drd;
385
+}
386
+
387
+static bool drd_rdy_list_found(struct dlp *dlp, struct dlp_runtime_data *drd)
388
+{
389
+ struct dlp_runtime_data *_drd;
390
+ unsigned long flags;
391
+ bool found = false;
392
+
393
+ if (!drd)
394
+ return false;
395
+
396
+ spin_lock_irqsave(&dlp->lock, flags);
397
+ list_for_each_entry(_drd, &dlp->drd_rdy_list, node) {
398
+ if (_drd == drd) {
399
+ found = true;
400
+ break;
401
+ }
402
+ }
403
+ spin_unlock_irqrestore(&dlp->lock, flags);
404
+
405
+ return found;
406
+}
407
+
408
+static void drd_rdy_list_free(struct dlp *dlp)
409
+{
410
+ struct list_head drd_list;
411
+ struct dlp_runtime_data *drd;
412
+ unsigned long flags;
413
+
414
+ spin_lock_irqsave(&dlp->lock, flags);
415
+ list_replace_init(&dlp->drd_rdy_list, &drd_list);
416
+ spin_unlock_irqrestore(&dlp->lock, flags);
417
+
418
+ while (!list_empty(&drd_list)) {
419
+ drd = list_first_entry(&drd_list, struct dlp_runtime_data, node);
420
+ list_del(&drd->node);
421
+ drd_put(drd);
422
+ }
423
+}
424
+
425
+static void drd_ref_list_add(struct dlp *dlp, struct dlp_runtime_data *drd)
426
+{
427
+ unsigned long flags;
428
+
429
+ /* push valid playback into ref list */
430
+ spin_lock_irqsave(&dlp->lock, flags);
431
+ list_add_tail(&drd->node, &dlp->drd_ref_list);
432
+ spin_unlock_irqrestore(&dlp->lock, flags);
433
+}
434
+
435
+static struct dlp_runtime_data *drd_ref_list_first(struct dlp *dlp)
436
+{
437
+ struct dlp_runtime_data *drd = NULL;
438
+ unsigned long flags;
439
+
440
+ spin_lock_irqsave(&dlp->lock, flags);
441
+ if (!list_empty(&dlp->drd_ref_list))
442
+ drd = list_first_entry(&dlp->drd_ref_list, struct dlp_runtime_data, node);
443
+ spin_unlock_irqrestore(&dlp->lock, flags);
444
+
445
+ return drd;
446
+}
447
+
448
+static struct dlp_runtime_data *drd_ref_list_del(struct dlp *dlp,
449
+ struct dlp_runtime_data *drd)
450
+{
451
+ unsigned long flags;
452
+
453
+ spin_lock_irqsave(&dlp->lock, flags);
454
+ list_del(&drd->node);
455
+ spin_unlock_irqrestore(&dlp->lock, flags);
456
+
457
+ return drd;
458
+}
459
+
460
+static void drd_ref_list_free(struct dlp *dlp)
461
+{
462
+ struct list_head drd_list;
463
+ struct dlp_runtime_data *drd;
464
+ unsigned long flags;
465
+
466
+ spin_lock_irqsave(&dlp->lock, flags);
467
+ list_replace_init(&dlp->drd_ref_list, &drd_list);
468
+ spin_unlock_irqrestore(&dlp->lock, flags);
469
+
470
+ while (!list_empty(&drd_list)) {
471
+ drd = list_first_entry(&drd_list, struct dlp_runtime_data, node);
472
+ list_del(&drd->node);
473
+
474
+ if (!atomic_read(&drd->stop))
475
+ drd_rdy_list_add(dlp, drd);
476
+ else
477
+ drd_put(drd);
478
+ }
479
+}
480
+
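Taken together, the list helpers above give each playback shadow drd the following lifecycle (summary added for review, not code from the patch):

	/*
	 *   drd_avl_list --dlp_prepare(playback)--> drd_rdy_list
	 *   drd_rdy_list --dlp_start() on the second stream--> drd_ref_list
	 *   drd_ref_list --consumed or stopped--> drd_put() -> drd_release()
	 *                 -> drd_buf_free() + node back onto drd_avl_list
	 *
	 * drd_ref_list_free() is the one exception: a shadow whose stream has
	 * not been stopped yet goes back onto drd_rdy_list instead of being
	 * released, so a restarted capture can pick it up again.
	 */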
481
+int dlp_hw_params(struct snd_soc_component *component,
482
+ struct snd_pcm_substream *substream,
483
+ struct snd_pcm_hw_params *params)
484
+{
485
+ struct dlp *dlp = soc_component_to_dlp(component);
486
+ struct dlp_runtime_data *drd = substream_to_drd(substream);
389487 bool is_playback = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
390488 int ch_req = params_channels(params), ch_exp = 0;
391
- int ret;
489
+
490
+ if (unlikely(!dlp || !drd))
491
+ return -EINVAL;
392492
393493 /* mode should match to channels */
394494 if (!is_playback && !dlp_mode_channels_match(dlp, ch_req, &ch_exp)) {
@@ -398,292 +498,150 @@
398498 return -EINVAL;
399499 }
400500
401
- memset(&slave_config, 0, sizeof(slave_config));
402
-
403
- ret = snd_dmaengine_pcm_prepare_slave_config(substream, params, &slave_config);
404
- if (ret)
405
- return ret;
406
-
407
- ret = dmaengine_slave_config(chan, &slave_config);
408
- if (ret)
409
- return ret;
410
-
411
- prtd->frame_bytes = snd_pcm_format_size(params_format(params),
412
- params_channels(params));
413
- prtd->period_sz = params_period_size(params);
414
- prtd->buf_sz = params_buffer_size(params);
415
- prtd->channels = params_channels(params);
501
+ drd->frame_bytes = snd_pcm_format_size(params_format(params),
502
+ params_channels(params));
503
+ drd->period_sz = params_period_size(params);
504
+ drd->buf_sz = params_buffer_size(params);
505
+ drd->channels = params_channels(params);
416506
417507 if (is_playback)
418
- prtd->buf_sz *= PBUF_CNT;
508
+ drd->buf_sz *= PBUF_CNT;
419509
420510 return 0;
421511 }
512
+EXPORT_SYMBOL_GPL(dlp_hw_params);
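For reference, the bookkeeping above with a common configuration (illustration only):

	/*
	 * S16_LE, 2ch, period 1024 frames, buffer 4096 frames:
	 *   frame_bytes = 2 * 2        = 4
	 *   period_sz   = 1024, buf_sz = 4096          (capture)
	 *   buf_sz      = 4096 * PBUF_CNT (2) = 8192   (playback)
	 * The doubled playback buf_sz sizes the shadow ring buffer, presumably
	 * so the capture side has extra playback history to read from while
	 * the pointers are being aligned.
	 */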
422513
423
-static int
424
-dmaengine_pcm_set_runtime_hwparams(struct snd_soc_component *component,
425
- struct snd_pcm_substream *substream)
514
+int dlp_open(struct dlp *dlp, struct dlp_runtime_data *drd,
515
+ struct snd_pcm_substream *substream)
426516 {
427
- struct snd_soc_pcm_runtime *rtd = asoc_substream_to_rtd(substream);
428
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
429
- struct device *dma_dev = dmaengine_dma_dev(dlp, substream);
430
- struct dma_chan *chan = dlp->chan[substream->stream];
431
- struct snd_dmaengine_dai_dma_data *dma_data;
432
- struct snd_pcm_hardware hw;
433
-
434
- if (rtd->num_cpus > 1) {
435
- dev_err(rtd->dev,
436
- "%s doesn't support Multi CPU yet\n", __func__);
517
+ if (unlikely(!dlp || !drd))
437518 return -EINVAL;
438
- }
439519
440
- dma_data = snd_soc_dai_get_dma_data(asoc_rtd_to_cpu(rtd, 0), substream);
520
+ drd->parent = dlp;
521
+ drd->stream = substream->stream;
441522
442
- memset(&hw, 0, sizeof(hw));
443
- hw.info = SNDRV_PCM_INFO_MMAP | SNDRV_PCM_INFO_MMAP_VALID |
444
- SNDRV_PCM_INFO_INTERLEAVED;
445
- hw.periods_min = 2;
446
- hw.periods_max = UINT_MAX;
447
- hw.period_bytes_min = 256;
448
- hw.period_bytes_max = dma_get_max_seg_size(dma_dev);
449
- hw.buffer_bytes_max = SIZE_MAX;
450
- hw.fifo_size = dma_data->fifo_size;
451
-
452
- /**
453
- * FIXME: Remove the return value check to align with the code
454
- * before adding snd_dmaengine_pcm_refine_runtime_hwparams
455
- * function.
456
- */
457
- snd_dmaengine_pcm_refine_runtime_hwparams(substream,
458
- dma_data,
459
- &hw,
460
- chan);
461
-
462
- return snd_soc_set_runtime_hwparams(substream, &hw);
463
-}
464
-
465
-static int dmaengine_dlp_open(struct snd_soc_component *component,
466
- struct snd_pcm_substream *substream)
467
-{
468
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
469
- struct dma_chan *chan = dlp->chan[substream->stream];
470
- struct dmaengine_dlp_runtime_data *prtd;
471
- int ret;
472
-
473
- if (!chan)
474
- return -ENXIO;
475
-
476
- ret = dmaengine_pcm_set_runtime_hwparams(component, substream);
477
- if (ret)
478
- return ret;
479
-
480
- ret = snd_pcm_hw_constraint_integer(substream->runtime,
481
- SNDRV_PCM_HW_PARAM_PERIODS);
482
- if (ret < 0)
483
- return ret;
484
-
485
- prtd = kzalloc(sizeof(*prtd), GFP_KERNEL);
486
- if (!prtd)
487
- return -ENOMEM;
488
-
489
- dlp_info("PRTD-CREATE: 0x%px (%s)\n",
490
- prtd, substream->stream ? "C" : "P");
491
-
492
- kref_init(&prtd->refcount);
493
- prtd->parent = dlp;
494
- prtd->stream = substream->stream;
495
- prtd->dma_chan = chan;
496
-
497
- substream->runtime->private_data = prtd;
523
+ substream->runtime->private_data = drd;
498524
499525 dlp_activate(dlp);
500526
501527 return 0;
502528 }
529
+EXPORT_SYMBOL_GPL(dlp_open);
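A sketch of the expected caller (assumption: the companion platform PCM driver is not in this file, and the names and allocation below are made up):

	static int platform_pcm_open(struct snd_soc_component *component,
				     struct snd_pcm_substream *substream)
	{
		struct dlp *dlp = soc_component_to_dlp(component);
		struct dlp_runtime_data *drd;

		drd = kzalloc(sizeof(*drd), GFP_KERNEL);	/* or embedded in the platform's own prtd */
		if (!drd)
			return -ENOMEM;

		/* sets runtime->private_data and bumps dlp->active */
		return dlp_open(dlp, drd, substream);
	}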
503530
504
-static void dmaengine_free_prtd(struct kref *ref)
531
+int dlp_close(struct dlp *dlp, struct dlp_runtime_data *drd,
532
+ struct snd_pcm_substream *substream)
505533 {
506
- struct dmaengine_dlp_runtime_data *prtd =
507
- container_of(ref, struct dmaengine_dlp_runtime_data, refcount);
508
-
509
- dlp_info("PRTD-FREE: 0x%px\n", prtd);
510
-
511
- kfree(prtd->buf);
512
- kfree(prtd);
513
-}
514
-
515
-static void free_ref_list(struct snd_soc_component *component)
516
-{
517
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
518
- struct dmaengine_dlp_runtime_data *prtd, *_pt;
519
-
520
- spin_lock(&dlp->lock);
521
- list_for_each_entry_safe(prtd, _pt, &dlp->ref_list, node) {
522
- list_del(&prtd->node);
523
- kref_put(&prtd->refcount, dmaengine_free_prtd);
524
- }
525
- spin_unlock(&dlp->lock);
526
-}
527
-
528
-static int dmaengine_dlp_close(struct snd_soc_component *component,
529
- struct snd_pcm_substream *substream)
530
-{
531
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
532
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
533
-
534
- dmaengine_synchronize(prtd->dma_chan);
534
+ if (unlikely(!dlp || !drd))
535
+ return -EINVAL;
535536
536537 /*
537
- * kref put should be after hw_ptr updated when stop,
538
- * ops->trigger: SNDRV_PCM_TRIGGER_STOP -> ops->close
539
- * obviously, it is!
538
+ * In the open -> hw_params -> prepare -> close flow (stream never
539
+ * triggered), check and free everything here.
540540 */
541
- kref_put(&prtd->refcount, dmaengine_free_prtd);
541
+ if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
542
+ drd_put(dlp->drd_pb_shadow);
543
+ dlp->drd_pb_shadow = NULL;
544
+ } else {
545
+ drd_buf_free(drd);
546
+ }
542547
543548 dlp_deactivate(dlp);
544549
545550 return 0;
546551 }
552
+EXPORT_SYMBOL_GPL(dlp_close);
547553
548
-static snd_pcm_uframes_t dmaengine_dlp_pointer(
549
- struct snd_soc_component *component,
550
- struct snd_pcm_substream *substream)
554
+void dlp_dma_complete(struct dlp *dlp, struct dlp_runtime_data *drd)
551555 {
552
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
553
- struct dma_tx_state state;
554
- unsigned int buf_size;
555
- unsigned int pos = 0;
556
-
557
- dmaengine_tx_status(prtd->dma_chan, prtd->cookie, &state);
558
- buf_size = snd_pcm_lib_buffer_bytes(substream);
559
- if (state.residue > 0 && state.residue <= buf_size)
560
- pos = buf_size - state.residue;
561
-
562
- return dlp_bytes_to_frames(prtd, pos);
563
-}
564
-
565
-static void dmaengine_dlp_dma_complete(void *arg)
566
-{
567
- struct snd_pcm_substream *substream = arg;
568
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
569
- struct dmaengine_dlp *dlp = prtd->parent;
570
-
571
- if (!substream->runtime)
556
+ if (unlikely(!dlp || !drd))
572557 return;
573558
574
- spin_lock(&dlp->lock);
575
- prtd->period_elapsed++;
576
- prtd->hw_ptr = prtd->period_elapsed * prtd->period_sz;
577
- spin_unlock(&dlp->lock);
578
- snd_pcm_period_elapsed(substream);
559
+ atomic64_inc(&drd->period_elapsed);
579560 }
561
+EXPORT_SYMBOL_GPL(dlp_dma_complete);
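A sketch of where this is meant to be called from (assumption, the callback name is illustrative): the platform's cyclic-DMA completion handler, right before waking the ALSA core.

	static void platform_dma_complete(void *arg)
	{
		struct snd_pcm_substream *substream = arg;
		struct dlp_runtime_data *drd = substream_to_drd(substream);

		dlp_dma_complete(drd->parent, drd);	/* count the elapsed period */
		snd_pcm_period_elapsed(substream);
	}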
580562
581
-static int dmaengine_dlp_prepare_and_submit(struct snd_pcm_substream *substream)
563
+int dlp_start(struct snd_soc_component *component,
564
+ struct snd_pcm_substream *substream,
565
+ struct device *dev,
566
+ dma_pointer_f dma_pointer)
582567 {
583
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
584
- struct dma_chan *chan = prtd->dma_chan;
585
- struct dma_async_tx_descriptor *desc;
586
- enum dma_transfer_direction direction;
587
- unsigned long flags = DMA_CTRL_ACK;
588
-
589
- direction = snd_pcm_substream_to_dma_direction(substream);
590
-
591
- if (!substream->runtime->no_period_wakeup)
592
- flags |= DMA_PREP_INTERRUPT;
593
-
594
- desc = dmaengine_prep_dma_cyclic(chan,
595
- substream->runtime->dma_addr,
596
- snd_pcm_lib_buffer_bytes(substream),
597
- snd_pcm_lib_period_bytes(substream), direction, flags);
598
-
599
- if (!desc)
600
- return -ENOMEM;
601
-
602
- desc->callback = dmaengine_dlp_dma_complete;
603
- desc->callback_param = substream;
604
- prtd->cookie = dmaengine_submit(desc);
605
-
606
- return 0;
607
-}
608
-
609
-static int dmaengine_dlp_setup(struct snd_soc_component *component,
610
- struct snd_pcm_substream *substream)
611
-{
612
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
568
+ struct dlp *dlp = soc_component_to_dlp(component);
613569 int bstream = SNDRV_PCM_STREAM_LAST - substream->stream;
614570 struct snd_pcm_str *bro = &substream->pcm->streams[bstream];
615571 struct snd_pcm_substream *bsubstream = bro->substream;
616
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
617
- struct dmaengine_dlp_runtime_data *brtd = substream_to_prtd(bsubstream);
618
- struct dmaengine_dlp_runtime_data *pref = dlp->pref;
572
+ struct dlp_runtime_data *adrd = substream_to_drd(substream);
573
+ struct dlp_runtime_data *bdrd = substream_to_drd(bsubstream);
574
+ struct dlp_runtime_data *drd_ref;
619575 bool is_playback = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
620
- snd_pcm_uframes_t a = 0, b = 0, fifo_a = 0, fifo_b = 0;
576
+ uint64_t a = 0, b = 0;
577
+ snd_pcm_uframes_t fifo_a = 0, fifo_b = 0;
621578 snd_pcm_sframes_t delta = 0;
579
+
580
+ if (unlikely(!dlp || !adrd || !dma_pointer))
581
+ return -EINVAL;
622582
623583 if (dlp->mode == DLP_MODE_DISABLED)
624584 return -EINVAL;
625585
626
- fifo_a = dlp->config->get_fifo_count(dlp->dev, substream->stream);
627
- a = dmaengine_dlp_pointer(component, substream);
586
+ fifo_a = dlp->config->get_fifo_count(dev, substream);
587
+ a = dma_pointer(component, substream) % adrd->period_sz;
628588
629589 if (bsubstream->runtime && snd_pcm_running(bsubstream)) {
630
- fifo_b = dlp->config->get_fifo_count(dlp->dev, bstream);
631
- b = dmaengine_dlp_pointer(component, bsubstream);
590
+ if (unlikely(!bdrd))
591
+ return -EINVAL;
632592
633
- spin_lock(&dlp->lock);
634
- if (!pref) {
635
- spin_unlock(&dlp->lock);
593
+ fifo_b = dlp->config->get_fifo_count(dev, bsubstream);
594
+ b = dma_pointer(component, bsubstream) % bdrd->period_sz;
595
+
596
+ drd_ref = drd_rdy_list_get(dlp);
597
+ if (unlikely(!drd_ref)) {
598
+ dev_err(dev, "Failed to get rdy drd\n");
636599 return -EINVAL;
637600 }
638601
639
- a = (prtd->period_elapsed * prtd->period_sz) + (a % prtd->period_sz);
640
- b = (brtd->period_elapsed * brtd->period_sz) + (b % brtd->period_sz);
602
+ a += (atomic64_read(&adrd->period_elapsed) * adrd->period_sz);
603
+ b += (atomic64_read(&bdrd->period_elapsed) * bdrd->period_sz);
641604
642
- fifo_a = dlp_bytes_to_frames(prtd, fifo_a * 4);
643
- fifo_b = dlp_bytes_to_frames(brtd, fifo_b * 4);
605
+ fifo_a = dlp_bytes_to_frames(adrd, fifo_a * 4);
606
+ fifo_b = dlp_bytes_to_frames(bdrd, fifo_b * 4);
644607
645608 delta = is_playback ? (a - fifo_a) - (b + fifo_b) : (b - fifo_b) - (a + fifo_a);
646609
647
- pref->hw_ptr_delta = delta;
648
- kref_get(&pref->refcount);
649
- /* push valid playback into ref list */
650
- list_add_tail(&pref->node, &dlp->ref_list);
610
+ drd_ref->hw_ptr_delta = delta;
651611
652
- spin_unlock(&dlp->lock);
612
+ drd_ref_list_add(dlp, drd_ref);
653613 }
654614
655615 if (is_playback)
656
- dlp_info("START-P: DMA-P: %lu, DMA-C: %lu, FIFO-P: %lu, FIFO-C: %lu, DELTA: %ld\n",
657
- a, b, fifo_a, fifo_b, delta);
616
+ dev_dbg(dev, "START-P: DMA-P: %llu, DMA-C: %llu, FIFO-P: %lu, FIFO-C: %lu, DELTA: %ld\n",
617
+ a, b, fifo_a, fifo_b, delta);
658618 else
659
- dlp_info("START-C: DMA-P: %lu, DMA-C: %lu, FIFO-P: %lu, FIFO-C: %lu, DELTA: %ld\n",
660
- b, a, fifo_b, fifo_a, delta);
619
+ dev_dbg(dev, "START-C: DMA-P: %llu, DMA-C: %llu, FIFO-P: %lu, FIFO-C: %lu, DELTA: %ld\n",
620
+ b, a, fifo_b, fifo_a, delta);
661621
662622 return 0;
663623 }
624
+EXPORT_SYMBOL_GPL(dlp_start);
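A worked example of the delta computed above (numbers are illustrative): capture is started while playback is already running.

	/*
	 *   a = capture DMA position   = 0 frames,    fifo_a = 8
	 *   b = playback DMA position  = 5000 frames, fifo_b = 16
	 *
	 *   delta = (b - fifo_b) - (a + fifo_a) = (5000 - 16) - (0 + 8) = 4976
	 *
	 * so capture frame n lines up with playback frame n + 4976, which is
	 * exactly the hw_ptr_delta that process_capture() adds to appl_ptr.
	 */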
664625
665
-static void dmaengine_dlp_release(struct snd_soc_component *component,
666
- struct snd_pcm_substream *substream)
626
+void dlp_stop(struct snd_soc_component *component,
627
+ struct snd_pcm_substream *substream,
628
+ dma_pointer_f dma_pointer)
667629 {
668
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
669
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
670
- struct dmaengine_dlp_runtime_data *pref = dlp->pref;
630
+ struct dlp *dlp = soc_component_to_dlp(component);
631
+ struct dlp_runtime_data *drd = substream_to_drd(substream);
671632 struct snd_pcm_runtime *runtime = substream->runtime;
672
- snd_pcm_uframes_t appl_ptr, hw_ptr;
633
+ uint64_t appl_ptr, hw_ptr;
634
+
635
+ if (unlikely(!dlp || !drd || !runtime || !dma_pointer))
636
+ return;
673637
674638 if (dlp->mode == DLP_MODE_DISABLED)
675639 return;
676640
677641 /* any data in FIFOs will be gone ,so don't care */
678642 appl_ptr = READ_ONCE(runtime->control->appl_ptr);
679
- hw_ptr = dmaengine_dlp_pointer(component, substream);
680
- spin_lock(&dlp->lock);
681
- hw_ptr = (prtd->period_elapsed * prtd->period_sz) + (hw_ptr % prtd->period_sz);
682
- if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
683
- pref->hw_ptr = min(hw_ptr, appl_ptr);
684
- prtd->period_elapsed = 0;
685
- prtd->hw_ptr = 0;
686
- spin_unlock(&dlp->lock);
643
+ hw_ptr = dma_pointer(component, substream) % drd->period_sz;
644
+ hw_ptr += (atomic64_read(&drd->period_elapsed) * drd->period_sz);
687645
688646 /*
689647 * playback:
@@ -694,187 +652,98 @@
694652 * anyway, we should use the smaller one, obviously, it's hw_ptr.
695653 */
696654 if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
697
- spin_lock(&dlp->pref_lock);
698
- kref_put(&pref->refcount, dmaengine_free_prtd);
699
- dlp->pref = NULL;
700
- spin_unlock(&dlp->pref_lock);
701
- dlp_info("STOP-P: applptr: %lu, hwptr: %lu\n", appl_ptr, hw_ptr);
655
+ if (dlp->drd_pb_shadow) {
656
+ dlp->drd_pb_shadow->hw_ptr = min(hw_ptr, appl_ptr);
657
+ atomic_set(&dlp->drd_pb_shadow->stop, 1);
658
+ }
659
+ drd_rdy_list_free(dlp);
702660 } else {
703661 /* free residue playback ref list for capture when stop */
704
- free_ref_list(component);
705
- dlp_info("STOP-C: applptr: %lu, hwptr: %lu\n", appl_ptr, hw_ptr);
706
- }
707
-}
708
-
709
-static int dmaengine_dlp_trigger(struct snd_soc_component *component,
710
- struct snd_pcm_substream *substream, int cmd)
711
-{
712
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
713
- struct snd_pcm_runtime *runtime = substream->runtime;
714
- int ret;
715
-
716
- switch (cmd) {
717
- case SNDRV_PCM_TRIGGER_START:
718
- ret = dmaengine_dlp_prepare_and_submit(substream);
719
- if (ret)
720
- return ret;
721
- dma_async_issue_pending(prtd->dma_chan);
722
- dmaengine_dlp_setup(component, substream);
723
- break;
724
- case SNDRV_PCM_TRIGGER_RESUME:
725
- case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
726
- dmaengine_resume(prtd->dma_chan);
727
- break;
728
- case SNDRV_PCM_TRIGGER_SUSPEND:
729
- if (runtime->info & SNDRV_PCM_INFO_PAUSE) {
730
- dmaengine_pause(prtd->dma_chan);
731
- } else {
732
- dmaengine_dlp_release(component, substream);
733
- dmaengine_terminate_async(prtd->dma_chan);
734
- }
735
- break;
736
- case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
737
- dmaengine_pause(prtd->dma_chan);
738
- break;
739
- case SNDRV_PCM_TRIGGER_STOP:
740
- dmaengine_dlp_release(component, substream);
741
- dmaengine_terminate_async(prtd->dma_chan);
742
- break;
743
- default:
744
- return -EINVAL;
662
+ drd_ref_list_free(dlp);
745663 }
746664
747
- return 0;
665
+ atomic64_set(&drd->period_elapsed, 0);
666
+
667
+ dev_dbg(dlp->dev, "STOP-%s: applptr: %llu, hwptr: %llu\n",
668
+ substream->stream ? "C" : "P", appl_ptr, hw_ptr);
748669 }
749
-
750
-static int dmaengine_dlp_new(struct snd_soc_component *component,
751
- struct snd_soc_pcm_runtime *rtd)
752
-{
753
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
754
- struct snd_pcm_substream *substream;
755
- size_t prealloc_buffer_size;
756
- size_t max_buffer_size;
757
- unsigned int i;
758
-
759
- prealloc_buffer_size = prealloc_buffer_size_kbytes * 1024;
760
- max_buffer_size = SIZE_MAX;
761
-
762
- for_each_pcm_streams(i) {
763
- substream = rtd->pcm->streams[i].substream;
764
- if (!substream)
765
- continue;
766
-
767
- if (!dlp->chan[i]) {
768
- dev_err(component->dev,
769
- "Missing dma channel for stream: %d\n", i);
770
- return -EINVAL;
771
- }
772
-
773
- snd_pcm_set_managed_buffer(substream,
774
- SNDRV_DMA_TYPE_DEV_IRAM,
775
- dmaengine_dma_dev(dlp, substream),
776
- prealloc_buffer_size,
777
- max_buffer_size);
778
-
779
- if (rtd->pcm->streams[i].pcm->name[0] == '\0') {
780
- strscpy_pad(rtd->pcm->streams[i].pcm->name,
781
- rtd->pcm->streams[i].pcm->id,
782
- sizeof(rtd->pcm->streams[i].pcm->name));
783
- }
784
- }
785
-
786
- return 0;
787
-}
788
-
789
-static struct dmaengine_dlp_runtime_data *get_ref(struct snd_soc_component *component)
790
-{
791
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
792
- struct dmaengine_dlp_runtime_data *pref = NULL;
793
-
794
- spin_lock(&dlp->lock);
795
- if (!list_empty(&dlp->ref_list)) {
796
- pref = list_first_entry(&dlp->ref_list, struct dmaengine_dlp_runtime_data, node);
797
- list_del(&pref->node);
798
- }
799
- spin_unlock(&dlp->lock);
800
-
801
- return pref;
802
-}
670
+EXPORT_SYMBOL_GPL(dlp_stop);
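The min() in the playback branch above, with concrete numbers (illustration only):

	/*
	 * Playback stopped with appl_ptr = 10240 but only hw_ptr = 10000
	 * frames actually left the DMA: the shadow keeps
	 * min(10000, 10240) = 10000, so once ->stop is set process_capture()
	 * substitutes loopback data only up to frame 10000 and ignores the
	 * 240 frames that were queued but never played.
	 */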
803671
804672 static int process_capture(struct snd_soc_component *component,
805673 struct snd_pcm_substream *substream,
806674 unsigned long hwoff,
807675 void __user *buf, unsigned long bytes)
808676 {
809
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
677
+ struct dlp *dlp = soc_component_to_dlp(component);
810678 struct snd_pcm_runtime *runtime = substream->runtime;
811
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
812
- struct dmaengine_dlp_runtime_data *pref = NULL;
813
- void *dma_ptr = runtime->dma_area + hwoff;
814
- snd_pcm_sframes_t frames = dlp_bytes_to_frames(prtd, bytes);
679
+ struct dlp_runtime_data *drd = substream_to_drd(substream);
680
+ struct dlp_runtime_data *drd_ref = NULL;
681
+ snd_pcm_sframes_t frames = 0;
815682 snd_pcm_sframes_t frames_consumed = 0, frames_residue = 0, frames_tmp = 0;
816683 snd_pcm_sframes_t ofs = 0;
817684 snd_pcm_uframes_t appl_ptr;
818
- char *cbuf = prtd->buf, *pbuf = NULL;
819685 int ofs_cap, ofs_play, size_cap, size_play;
820686 int i = 0, j = 0, ret = 0;
821687 bool free_ref = false, mix = false;
688
+ char *cbuf = NULL, *pbuf = NULL;
689
+ void *dma_ptr;
690
+
691
+ if (unlikely(!drd || !runtime || !buf))
692
+ return -EINVAL;
693
+
694
+ frames = dlp_bytes_to_frames(drd, bytes);
695
+ dma_ptr = runtime->dma_area + hwoff;
696
+ cbuf = drd->buf;
822697
823698 appl_ptr = READ_ONCE(runtime->control->appl_ptr);
824699
825700 memcpy(cbuf, dma_ptr, bytes);
826701 #ifdef DLP_DBG
827702 /* DBG: mark STUB in ch-REC for trace each read */
828
- memset(cbuf, 0x22, dlp_channels_to_bytes(prtd, 1));
703
+ memset(cbuf, 0x22, dlp_channels_to_bytes(drd, 1));
829704 #endif
830
- ret = dlp_get_offset_size(prtd, dlp->mode, &ofs_cap, &size_cap, NULL);
705
+ ret = dlp_get_offset_size(drd, dlp->mode, &ofs_cap, &size_cap, NULL);
831706 if (ret) {
832
- dlp_info("fail to get dlp cap offset\n");
707
+ dev_err(dlp->dev, "Failed to get dlp cap offset\n");
833708 return -EINVAL;
834709 }
835710
836711 /* clear channel-LP_CHN */
837712 for (i = 0; i < frames; i++) {
838
- cbuf = prtd->buf + dlp_frames_to_bytes(prtd, i) + ofs_cap;
713
+ cbuf = drd->buf + dlp_frames_to_bytes(drd, i) + ofs_cap;
839714 memset(cbuf, 0x0, size_cap);
840715 }
841716
842717 start:
843
- if (!prtd->ref)
844
- prtd->ref = get_ref(component);
845
- pref = prtd->ref;
846
-
847
- /* do nothing if play stop */
848
- if (!pref)
718
+ drd_ref = drd_get(drd_ref_list_first(dlp));
719
+ if (!drd_ref)
849720 return 0;
850721
851
- ret = dlp_get_offset_size(pref, dlp->mode, &ofs_play, &size_play, &mix);
722
+ ret = dlp_get_offset_size(drd_ref, dlp->mode, &ofs_play, &size_play, &mix);
852723 if (ret) {
853
- dlp_info("fail to get dlp play offset\n");
854
- return 0;
724
+ dev_err(dlp->dev, "Failed to get dlp play offset\n");
725
+ goto _drd_put;
855726 }
856727
857
- ofs = appl_ptr + pref->hw_ptr_delta;
728
+ ofs = appl_ptr + drd_ref->hw_ptr_delta;
858729
859730 /*
860
- * if playback stop, kref_put ref, and we can check this to
861
- * know if playback stopped, then free prtd->ref if data consumed.
862
- *
731
+ * if playback stop, process the data tail and then
732
+ * free drd_ref if data consumed.
863733 */
864
- if (kref_read(&pref->refcount) == 1) {
865
- if (ofs >= pref->hw_ptr) {
866
- kref_put(&pref->refcount, dmaengine_free_prtd);
867
- prtd->ref = NULL;
868
- return 0;
869
- } else if ((ofs + frames) > pref->hw_ptr) {
870
- dlp_info("applptr: %8lu, ofs': %7ld, refhwptr: %lu, frames: %lu (*)\n",
871
- appl_ptr, ofs, pref->hw_ptr, frames);
734
+ if (atomic_read(&drd_ref->stop)) {
735
+ if (ofs >= drd_ref->hw_ptr) {
736
+ drd_put(drd_ref_list_del(dlp, drd_ref));
737
+ goto _drd_put;
738
+ } else if ((ofs + frames) > drd_ref->hw_ptr) {
739
+ dev_dbg(dlp->dev, "applptr: %8lu, ofs': %7ld, refhwptr: %lld, frames: %ld (*)\n",
740
+ appl_ptr, ofs, drd_ref->hw_ptr, frames);
872741 /*
873742 * should ignore the data that after play stop
874743 * and care about if the next ref start in the
875744 * same window
876745 */
877
- frames_tmp = pref->hw_ptr - ofs;
746
+ frames_tmp = drd_ref->hw_ptr - ofs;
878747 frames_residue = frames - frames_tmp;
879748 frames = frames_tmp;
880749 free_ref = true;
@@ -891,27 +760,29 @@
891760 *
892761 */
893762 if ((ofs + frames) <= 0)
894
- return 0;
763
+ goto _drd_put;
895764
896765 /* skip if ofs < 0 and fixup ofs */
897766 j = 0;
898767 if (ofs < 0) {
899
- dlp_info("applptr: %8lu, ofs: %8ld, frames: %lu (*)\n",
900
- appl_ptr, ofs, frames);
768
+ dev_dbg(dlp->dev, "applptr: %8lu, ofs: %8ld, frames: %ld (*)\n",
769
+ appl_ptr, ofs, frames);
901770 j = -ofs;
902771 frames += ofs;
903772 ofs = 0;
773
+ appl_ptr += j;
904774 }
905775
906
- ofs %= pref->buf_sz;
776
+ ofs %= drd_ref->buf_sz;
907777
908
- dlp_info("applptr: %8lu, ofs: %8ld, frames: %lu\n", appl_ptr, ofs, frames);
778
+ dev_dbg(dlp->dev, "applptr: %8lu, ofs: %8ld, frames: %5ld, refc: %u\n",
779
+ appl_ptr, ofs, frames, kref_read(&drd_ref->refcount));
909780
910781 for (i = 0; i < frames; i++, j++) {
911
- cbuf = prtd->buf + dlp_frames_to_bytes(prtd, j + frames_consumed) + ofs_cap;
912
- pbuf = pref->buf + dlp_frames_to_bytes(pref, ((i + ofs) % pref->buf_sz)) + ofs_play;
782
+ cbuf = drd->buf + dlp_frames_to_bytes(drd, j + frames_consumed) + ofs_cap;
783
+ pbuf = drd_ref->buf + dlp_frames_to_bytes(drd_ref, ((i + ofs) % drd_ref->buf_sz)) + ofs_play;
913784 if (mix)
914
- dlp_mix_frame_buffer(pref, pbuf);
785
+ dlp_mix_frame_buffer(drd_ref, pbuf);
915786 memcpy(cbuf, pbuf, size_cap);
916787 }
917788
@@ -919,8 +790,9 @@
919790 frames_consumed += frames;
920791
921792 if (free_ref) {
922
- kref_put(&pref->refcount, dmaengine_free_prtd);
923
- prtd->ref = NULL;
793
+ drd_put(drd_ref_list_del(dlp, drd_ref));
794
+ drd_put(drd_ref);
795
+ drd_ref = NULL;
924796 free_ref = false;
925797 if (frames_residue) {
926798 frames = frames_residue;
@@ -928,6 +800,10 @@
928800 goto start;
929801 }
930802 }
803
+
804
+_drd_put:
805
+ drd_put(drd_ref);
806
+ drd_ref = NULL;
931807
932808 return 0;
933809 }
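Restating the copy loop above in one place (summary for review; wrap, residue and error handling omitted):

	/*
	 * for each captured frame j in this read:
	 *     play = (capture appl_ptr + j) + hw_ptr_delta, modulo the
	 *            playback shadow buffer size
	 *     if (mix) fold all playback channels of that frame into ch0
	 *     copy size_cap bytes from the shadow at ofs_play into
	 *          drd->buf + j * frame_bytes + ofs_cap
	 *
	 * so the loopback channels returned to userspace carry the playback
	 * samples that were on the wire at the same moment.
	 */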
@@ -937,40 +813,37 @@
937813 unsigned long hwoff,
938814 void __user *buf, unsigned long bytes)
939815 {
940
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
941
- struct dmaengine_dlp_runtime_data *pref;
816
+ struct dlp *dlp = soc_component_to_dlp(component);
817
+ struct dlp_runtime_data *drd;
942818 char *pbuf;
943819 int ret = 0;
944820
945
- spin_lock(&dlp->pref_lock);
946
- pref = dlp->pref;
947
- if (!pref) {
948
- ret = -EFAULT;
949
- goto err_unlock;
950
- }
821
+ drd = drd_get(dlp->drd_pb_shadow);
822
+ if (!drd)
823
+ return 0;
951824
952
- pbuf = pref->buf + pref->buf_ofs;
825
+ pbuf = drd->buf + drd->buf_ofs;
953826
954827 if (copy_from_user(pbuf, buf, bytes)) {
955828 ret = -EFAULT;
956
- goto err_unlock;
829
+ goto err_put;
957830 }
958831
959
- pref->buf_ofs += bytes;
960
- pref->buf_ofs %= dlp_frames_to_bytes(pref, pref->buf_sz);
832
+ drd->buf_ofs += bytes;
833
+ drd->buf_ofs %= dlp_frames_to_bytes(drd, drd->buf_sz);
961834
962
-err_unlock:
963
- spin_unlock(&dlp->pref_lock);
835
+err_put:
836
+ drd_put(drd);
964837
965838 return ret;
966839 }
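The shadow write above is a plain ring buffer; with an 8192-frame shadow and frame_bytes = 4 (illustration):

	/*
	 * a 1024-frame (4096-byte) write lands at buf_ofs, then
	 *     buf_ofs = (buf_ofs + 4096) % (8192 * 4)
	 * and the data stays readable for process_capture() for as long as
	 * drd_get(dlp->drd_pb_shadow) keeps succeeding.
	 */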
967840
968
-static int dmaengine_dlp_process(struct snd_soc_component *component,
969
- struct snd_pcm_substream *substream,
970
- unsigned long hwoff,
971
- void __user *buf, unsigned long bytes)
841
+static int dlp_process(struct snd_soc_component *component,
842
+ struct snd_pcm_substream *substream,
843
+ unsigned long hwoff,
844
+ void __user *buf, unsigned long bytes)
972845 {
973
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
846
+ struct dlp *dlp = soc_component_to_dlp(component);
974847 int ret = 0;
975848
976849 if (dlp->mode == DLP_MODE_DISABLED)
@@ -984,25 +857,30 @@
984857 return ret;
985858 }
986859
987
-static int dmaengine_dlp_copy_user(struct snd_soc_component *component,
988
- struct snd_pcm_substream *substream,
989
- int channel, unsigned long hwoff,
990
- void __user *buf, unsigned long bytes)
860
+int dlp_copy_user(struct snd_soc_component *component,
861
+ struct snd_pcm_substream *substream,
862
+ int channel, unsigned long hwoff,
863
+ void __user *buf, unsigned long bytes)
991864 {
992
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
865
+ struct dlp_runtime_data *drd = substream_to_drd(substream);
993866 struct snd_pcm_runtime *runtime = substream->runtime;
994867 bool is_playback = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
995
- void *dma_ptr = runtime->dma_area + hwoff +
996
- channel * (runtime->dma_bytes / runtime->channels);
868
+ void *dma_ptr;
997869 int ret;
870
+
871
+ if (unlikely(!drd || !runtime || !buf))
872
+ return -EINVAL;
873
+
874
+ dma_ptr = runtime->dma_area + hwoff +
875
+ channel * (runtime->dma_bytes / runtime->channels);
998876
999877 if (is_playback)
1000878 if (copy_from_user(dma_ptr, buf, bytes))
1001879 return -EFAULT;
1002880
1003
- ret = dmaengine_dlp_process(component, substream, hwoff, buf, bytes);
881
+ ret = dlp_process(component, substream, hwoff, buf, bytes);
1004882 if (!ret)
1005
- dma_ptr = prtd->buf;
883
+ dma_ptr = drd->buf;
1006884
1007885 if (!is_playback)
1008886 if (copy_to_user(buf, dma_ptr, bytes))
@@ -1010,30 +888,33 @@
1010888
1011889 return 0;
1012890 }
891
+EXPORT_SYMBOL_GPL(dlp_copy_user);
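The overall data path through dlp_copy_user(), summarized for review:

	/*
	 * playback: user buf --copy_from_user--> DMA area
	 *           user buf --process_playback--> playback shadow ring
	 * capture : DMA area --process_capture--> drd->buf (loopback
	 *           channels patched from the shadow) --copy_to_user--> user
	 *
	 * The platform component is expected to wire this up as its
	 * .copy_user callback when loopback is enabled.
	 */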
1013892
1014893 static SOC_ENUM_SINGLE_EXT_DECL(dlp_mode, dlp_text);
1015894
1016
-static int dmaengine_dlp_mode_get(struct snd_kcontrol *kcontrol,
1017
- struct snd_ctl_elem_value *ucontrol)
895
+static int dlp_mode_get(struct snd_kcontrol *kcontrol,
896
+ struct snd_ctl_elem_value *ucontrol)
1018897 {
1019898 struct snd_soc_component *component = snd_soc_kcontrol_component(kcontrol);
1020
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
899
+ struct dlp *dlp = soc_component_to_dlp(component);
1021900
1022901 ucontrol->value.enumerated.item[0] = dlp->mode;
1023902
1024903 return 0;
1025904 }
1026905
1027
-static int dmaengine_dlp_mode_put(struct snd_kcontrol *kcontrol,
1028
- struct snd_ctl_elem_value *ucontrol)
906
+static int dlp_mode_put(struct snd_kcontrol *kcontrol,
907
+ struct snd_ctl_elem_value *ucontrol)
1029908 {
1030909 struct snd_soc_component *component = snd_soc_kcontrol_component(kcontrol);
1031
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
910
+ struct dlp *dlp = soc_component_to_dlp(component);
1032911 unsigned int mode = ucontrol->value.enumerated.item[0];
1033912
1034913 /* MUST: do not update mode while stream is running */
1035
- if (snd_soc_component_active(component))
914
+ if (atomic_read(&dlp->active)) {
915
+ dev_err(dlp->dev, "Should set this mode before pcm open\n");
1036916 return -EPERM;
917
+ }
1037918
1038919 if (mode == dlp->mode)
1039920 return 0;
@@ -1043,207 +924,141 @@
1043924 return 1;
1044925 }
1045926
1046
-static const struct snd_kcontrol_new dmaengine_dlp_controls[] = {
927
+static const struct snd_kcontrol_new dlp_controls[] = {
1047928 SOC_ENUM_EXT("Software Digital Loopback Mode", dlp_mode,
1048
- dmaengine_dlp_mode_get,
1049
- dmaengine_dlp_mode_put),
929
+ dlp_mode_get, dlp_mode_put),
1050930 };
1051931
1052
-static int dmaengine_dlp_prepare(struct snd_soc_component *component,
1053
- struct snd_pcm_substream *substream)
932
+int dlp_prepare(struct snd_soc_component *component,
933
+ struct snd_pcm_substream *substream)
1054934 {
1055
- struct dmaengine_dlp *dlp = soc_component_to_dlp(component);
1056
- struct dmaengine_dlp_runtime_data *prtd = substream_to_prtd(substream);
1057
- struct dmaengine_dlp_runtime_data *pref = NULL;
1058
- int buf_bytes = dlp_frames_to_bytes(prtd, prtd->buf_sz);
1059
-
1060
- if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
1061
- pref = kmemdup(prtd, sizeof(*prtd), GFP_KERNEL);
1062
- if (!pref)
1063
- return -ENOMEM;
1064
-
1065
- kref_init(&pref->refcount);
1066
- pref->buf_ofs = 0;
1067
- pref->buf = kzalloc(buf_bytes, GFP_KERNEL);
1068
- if (!pref->buf) {
1069
- kfree(pref);
1070
- return -ENOMEM;
1071
- }
1072
-
1073
- spin_lock(&dlp->pref_lock);
1074
- dlp->pref = pref;
1075
- spin_unlock(&dlp->pref_lock);
1076
- dlp_info("PREF-CREATE: 0x%px\n", pref);
1077
- } else {
1078
- prtd->buf = kzalloc(buf_bytes, GFP_KERNEL);
1079
- if (!prtd->buf)
1080
- return -ENOMEM;
1081
- }
1082
-
1083
- return 0;
1084
-}
1085
-static const struct snd_soc_component_driver dmaengine_dlp_component = {
1086
- .name = SND_DMAENGINE_DLP_DRV_NAME,
1087
- .probe_order = SND_SOC_COMP_ORDER_LATE,
1088
- .open = dmaengine_dlp_open,
1089
- .close = dmaengine_dlp_close,
1090
- .hw_params = dmaengine_dlp_hw_params,
1091
- .prepare = dmaengine_dlp_prepare,
1092
- .trigger = dmaengine_dlp_trigger,
1093
- .pointer = dmaengine_dlp_pointer,
1094
- .copy_user = dmaengine_dlp_copy_user,
1095
- .pcm_construct = dmaengine_dlp_new,
1096
- .controls = dmaengine_dlp_controls,
1097
- .num_controls = ARRAY_SIZE(dmaengine_dlp_controls),
1098
-};
1099
-
1100
-static const char * const dmaengine_pcm_dma_channel_names[] = {
1101
- [SNDRV_PCM_STREAM_PLAYBACK] = "tx",
1102
- [SNDRV_PCM_STREAM_CAPTURE] = "rx",
1103
-};
1104
-
1105
-static int dmaengine_pcm_request_chan_of(struct dmaengine_dlp *dlp,
1106
- struct device *dev, const struct snd_dmaengine_pcm_config *config)
1107
-{
1108
- unsigned int i;
1109
- const char *name;
1110
- struct dma_chan *chan;
1111
-
1112
- for_each_pcm_streams(i) {
1113
- name = dmaengine_pcm_dma_channel_names[i];
1114
- chan = dma_request_chan(dev, name);
1115
- if (IS_ERR(chan)) {
1116
- /*
1117
- * Only report probe deferral errors, channels
1118
- * might not be present for devices that
1119
- * support only TX or only RX.
1120
- */
1121
- if (PTR_ERR(chan) == -EPROBE_DEFER)
1122
- return -EPROBE_DEFER;
1123
- dlp->chan[i] = NULL;
1124
- } else {
1125
- dlp->chan[i] = chan;
1126
- }
1127
- }
1128
-
1129
- return 0;
1130
-}
1131
-
1132
-static void dmaengine_pcm_release_chan(struct dmaengine_dlp *dlp)
1133
-{
1134
- unsigned int i;
1135
-
1136
- for_each_pcm_streams(i) {
1137
- if (!dlp->chan[i])
1138
- continue;
1139
- dma_release_channel(dlp->chan[i]);
1140
- }
1141
-}
1142
-
1143
-/**
1144
- * snd_dmaengine_dlp_register - Register a dmaengine based DLP device
1145
- * @dev: The parent device for the DLP device
1146
- * @config: Platform specific DLP configuration
1147
- */
1148
-static int snd_dmaengine_dlp_register(struct device *dev,
1149
- const struct snd_dlp_config *config)
1150
-{
1151
- const struct snd_soc_component_driver *driver;
1152
- struct dmaengine_dlp *dlp;
935
+ struct dlp *dlp = soc_component_to_dlp(component);
936
+ struct dlp_runtime_data *drd = substream_to_drd(substream);
937
+ struct dlp_runtime_data *drd_new = NULL;
938
+ int buf_bytes, last_buf_bytes;
1153939 int ret;
1154940
1155
- dlp = kzalloc(sizeof(*dlp), GFP_KERNEL);
1156
- if (!dlp)
1157
- return -ENOMEM;
941
+ if (unlikely(!dlp || !drd))
942
+ return -EINVAL;
943
+
944
+ if (dlp->mode == DLP_MODE_DISABLED)
945
+ return 0;
946
+
947
+ buf_bytes = dlp_frames_to_bytes(drd, drd->buf_sz);
948
+ last_buf_bytes = dlp_frames_to_bytes(drd, drd->last_buf_sz);
949
+
950
+ if (substream->runtime->status->state == SNDRV_PCM_STATE_XRUN)
951
+ dev_dbg(dlp->dev, "stream[%d]: prepare from XRUN\n",
952
+ substream->stream);
953
+
954
+ if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
955
+ dev_dbg(dlp->dev, "avl count: %d\n", dlp->drd_avl_count);
956
+ if (snd_BUG_ON(!dlp->drd_avl_count))
957
+ return -EINVAL;
958
+
959
+ /*
960
+ * There might be multiple calls hw_params -> prepare
961
+ * before start stream, so, should check buf size status
962
+ * to determine whether to re-create buf or do nothing.
963
+ */
964
+ if (drd_rdy_list_found(dlp, dlp->drd_pb_shadow)) {
965
+ if (buf_bytes == last_buf_bytes)
966
+ return 0;
967
+
968
+ drd_rdy_list_free(dlp);
969
+ }
970
+
971
+ /* release the old one, re-create for new params */
972
+ drd_put(dlp->drd_pb_shadow);
973
+ dlp->drd_pb_shadow = NULL;
974
+
975
+ drd_new = drd_avl_list_get(dlp);
976
+ if (!drd_new)
977
+ return -ENOMEM;
978
+
979
+ drd_init_from(drd_new, drd);
980
+
981
+ ret = drd_buf_alloc(drd_new, buf_bytes);
982
+ if (ret)
983
+ return -ENOMEM;
984
+
985
+ if (snd_BUG_ON(!drd_get(drd_new)))
986
+ return -EINVAL;
987
+
988
+ drd_rdy_list_add(dlp, drd_new);
989
+
990
+ dlp->drd_pb_shadow = drd_new;
991
+ } else {
992
+ /*
993
+ * hw_params -> prepare may be called multiple times before the
994
+ * stream starts, so check the buffer size here to decide whether
995
+ * to re-create the buffer or do nothing.
996
+ */
997
+ if (drd->buf && buf_bytes == last_buf_bytes)
998
+ return 0;
999
+
1000
+ drd_buf_free(drd);
1001
+
1002
+ ret = drd_buf_alloc(drd, buf_bytes);
1003
+ if (ret)
1004
+ return ret;
1005
+ }
1006
+
1007
+ /* update last after all done success */
1008
+ drd->last_buf_sz = drd->buf_sz;
1009
+
1010
+ return 0;
1011
+}
1012
+EXPORT_SYMBOL_GPL(dlp_prepare);
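A concrete pass through the logic above (illustration): hw_params and prepare can run several times before the trigger, and again after an XRUN.

	/*
	 * 1st prepare: take a drd from the available list, allocate the
	 *              shadow buffer, publish it on the ready list and in
	 *              dlp->drd_pb_shadow.
	 * 2nd prepare, same buffer size: the shadow is still on the ready
	 *              list, nothing to do.
	 * prepare after new hw_params with a different size: drop the old
	 *              shadow and rebuild it with the new buf_bytes.
	 */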
1013
+
1014
+int dlp_probe(struct snd_soc_component *component)
1015
+{
1016
+ snd_soc_add_component_controls(component, dlp_controls,
1017
+ ARRAY_SIZE(dlp_controls));
1018
+ return 0;
1019
+}
1020
+EXPORT_SYMBOL_GPL(dlp_probe);
1021
+
1022
+int dlp_register(struct dlp *dlp, struct device *dev,
1023
+ const struct snd_soc_component_driver *driver,
1024
+ const struct snd_dlp_config *config)
1025
+{
1026
+ struct dlp_runtime_data *drd;
1027
+ int ret = 0, i = 0;
1028
+
1029
+ if (unlikely(!dlp || !dev || !driver || !config))
1030
+ return -EINVAL;
11581031
11591032 dlp->dev = dev;
11601033 dlp->config = config;
11611034
1162
- INIT_LIST_HEAD(&dlp->ref_list);
1163
- spin_lock_init(&dlp->lock);
1164
- spin_lock_init(&dlp->pref_lock);
1165
-
11661035 #ifdef CONFIG_DEBUG_FS
11671036 dlp->component.debugfs_prefix = "dma";
11681037 #endif
1169
- ret = dmaengine_pcm_request_chan_of(dlp, dev, NULL);
1170
- if (ret)
1171
- goto err_free_dma;
1038
+ INIT_LIST_HEAD(&dlp->drd_avl_list);
1039
+ INIT_LIST_HEAD(&dlp->drd_rdy_list);
1040
+ INIT_LIST_HEAD(&dlp->drd_ref_list);
11721041
1173
- driver = &dmaengine_dlp_component;
1042
+ dlp->drd_avl_count = ARRAY_SIZE(dlp->drds);
1043
+
1044
+ for (i = 0; i < dlp->drd_avl_count; i++) {
1045
+ drd = &dlp->drds[i];
1046
+ list_add_tail(&drd->node, &dlp->drd_avl_list);
1047
+ }
1048
+
1049
+ spin_lock_init(&dlp->lock);
1050
+ atomic_set(&dlp->active, 0);
11741051
11751052 ret = snd_soc_component_initialize(&dlp->component, driver, dev);
11761053 if (ret)
1177
- goto err_free_dma;
1054
+ return ret;
11781055
11791056 ret = snd_soc_add_component(&dlp->component, NULL, 0);
1180
- if (ret)
1181
- goto err_free_dma;
1182
-
1183
- return 0;
1184
-
1185
-err_free_dma:
1186
- dmaengine_pcm_release_chan(dlp);
1187
- kfree(dlp);
1188
- return ret;
1189
-}
1190
-
1191
-/**
1192
- * snd_dmaengine_dlp_unregister - Removes a dmaengine based DLP device
1193
- * @dev: Parent device the DLP was register with
1194
- *
1195
- * Removes a dmaengine based DLP device previously registered with
1196
- * snd_dmaengine_dlp_register.
1197
- */
1198
-static void snd_dmaengine_dlp_unregister(struct device *dev)
1199
-{
1200
- struct snd_soc_component *component;
1201
- struct dmaengine_dlp *dlp;
1202
-
1203
- component = snd_soc_lookup_component(dev, SND_DMAENGINE_DLP_DRV_NAME);
1204
- if (!component)
1205
- return;
1206
-
1207
- dlp = soc_component_to_dlp(component);
1208
-
1209
- snd_soc_unregister_component_by_driver(dev, component->driver);
1210
- dmaengine_pcm_release_chan(dlp);
1211
- kfree(dlp);
1212
-}
1213
-
1214
-static void devm_dmaengine_dlp_release(struct device *dev, void *res)
1215
-{
1216
- snd_dmaengine_dlp_unregister(*(struct device **)res);
1217
-}
1218
-
1219
-/**
1220
- * devm_snd_dmaengine_dlp_register - resource managed dmaengine DLP registration
1221
- * @dev: The parent device for the DLP device
1222
- * @config: Platform specific DLP configuration
1223
- *
1224
- * Register a dmaengine based DLP device with automatic unregistration when the
1225
- * device is unregistered.
1226
- */
1227
-int devm_snd_dmaengine_dlp_register(struct device *dev,
1228
- const struct snd_dlp_config *config)
1229
-{
1230
- struct device **ptr;
1231
- int ret;
1232
-
1233
- ptr = devres_alloc(devm_dmaengine_dlp_release, sizeof(*ptr), GFP_KERNEL);
1234
- if (!ptr)
1235
- return -ENOMEM;
1236
-
1237
- ret = snd_dmaengine_dlp_register(dev, config);
1238
- if (ret == 0) {
1239
- *ptr = dev;
1240
- devres_add(dev, ptr);
1241
- } else {
1242
- devres_free(ptr);
1243
- }
12441057
12451058 return ret;
12461059 }
1247
-EXPORT_SYMBOL_GPL(devm_snd_dmaengine_dlp_register);
1060
+EXPORT_SYMBOL_GPL(dlp_register);
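How a platform driver is expected to hook into this core (summary; everything except the exported dlp_* symbols is an assumption):

	/*
	 * - embed a struct dlp in the platform's private data;
	 * - fill a snd_dlp_config whose get_fifo_count callback reads the
	 *   controller's FIFO level for the given substream;
	 * - point a snd_soc_component_driver's open/close/hw_params/prepare/
	 *   copy_user/probe callbacks at the dlp_* helpers exported above,
	 *   passing its own DMA pointer callback to dlp_start()/dlp_stop();
	 * - call dlp_register(&priv->dlp, dev, &component_driver, &config).
	 */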
12481061
1062
+MODULE_DESCRIPTION("Rockchip Digital Loopback Core Driver");
1063
+MODULE_AUTHOR("Sugar Zhang <sugar.zhang@rock-chips.com>");
12491064 MODULE_LICENSE("GPL");