GNU Linux-libre 5.10.215-gnu1
[releases.git] / drivers / dma / stm32-mdma.c
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  *
4  * Copyright (C) STMicroelectronics SA 2017
5  * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
6  *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
7  *
8  * Driver for STM32 MDMA controller
9  *
10  * Inspired by stm32-dma.c and dma-jz4780.c
11  */
12
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/overflow.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/slab.h>

#include "virt-dma.h"
34
35 /*  MDMA Generic getter/setter */
36 #define STM32_MDMA_SHIFT(n)             (ffs(n) - 1)
37 #define STM32_MDMA_SET(n, mask)         (((n) << STM32_MDMA_SHIFT(mask)) & \
38                                          (mask))
39 #define STM32_MDMA_GET(n, mask)         (((n) & (mask)) >> \
40                                          STM32_MDMA_SHIFT(mask))
41
42 #define STM32_MDMA_GISR0                0x0000 /* MDMA Int Status Reg 1 */
43
44 /* MDMA Channel x interrupt/status register */
45 #define STM32_MDMA_CISR(x)              (0x40 + 0x40 * (x)) /* x = 0..62 */
46 #define STM32_MDMA_CISR_CRQA            BIT(16)
47 #define STM32_MDMA_CISR_TCIF            BIT(4)
48 #define STM32_MDMA_CISR_BTIF            BIT(3)
49 #define STM32_MDMA_CISR_BRTIF           BIT(2)
50 #define STM32_MDMA_CISR_CTCIF           BIT(1)
51 #define STM32_MDMA_CISR_TEIF            BIT(0)
52
53 /* MDMA Channel x interrupt flag clear register */
54 #define STM32_MDMA_CIFCR(x)             (0x44 + 0x40 * (x))
55 #define STM32_MDMA_CIFCR_CLTCIF         BIT(4)
56 #define STM32_MDMA_CIFCR_CBTIF          BIT(3)
57 #define STM32_MDMA_CIFCR_CBRTIF         BIT(2)
58 #define STM32_MDMA_CIFCR_CCTCIF         BIT(1)
59 #define STM32_MDMA_CIFCR_CTEIF          BIT(0)
60 #define STM32_MDMA_CIFCR_CLEAR_ALL      (STM32_MDMA_CIFCR_CLTCIF \
61                                         | STM32_MDMA_CIFCR_CBTIF \
62                                         | STM32_MDMA_CIFCR_CBRTIF \
63                                         | STM32_MDMA_CIFCR_CCTCIF \
64                                         | STM32_MDMA_CIFCR_CTEIF)
65
66 /* MDMA Channel x error status register */
67 #define STM32_MDMA_CESR(x)              (0x48 + 0x40 * (x))
68 #define STM32_MDMA_CESR_BSE             BIT(11)
69 #define STM32_MDMA_CESR_ASR             BIT(10)
70 #define STM32_MDMA_CESR_TEMD            BIT(9)
71 #define STM32_MDMA_CESR_TELD            BIT(8)
72 #define STM32_MDMA_CESR_TED             BIT(7)
73 #define STM32_MDMA_CESR_TEA_MASK        GENMASK(6, 0)
74
75 /* MDMA Channel x control register */
76 #define STM32_MDMA_CCR(x)               (0x4C + 0x40 * (x))
77 #define STM32_MDMA_CCR_SWRQ             BIT(16)
78 #define STM32_MDMA_CCR_WEX              BIT(14)
79 #define STM32_MDMA_CCR_HEX              BIT(13)
80 #define STM32_MDMA_CCR_BEX              BIT(12)
81 #define STM32_MDMA_CCR_PL_MASK          GENMASK(7, 6)
82 #define STM32_MDMA_CCR_PL(n)            STM32_MDMA_SET(n, \
83                                                        STM32_MDMA_CCR_PL_MASK)
84 #define STM32_MDMA_CCR_TCIE             BIT(5)
85 #define STM32_MDMA_CCR_BTIE             BIT(4)
86 #define STM32_MDMA_CCR_BRTIE            BIT(3)
87 #define STM32_MDMA_CCR_CTCIE            BIT(2)
88 #define STM32_MDMA_CCR_TEIE             BIT(1)
89 #define STM32_MDMA_CCR_EN               BIT(0)
90 #define STM32_MDMA_CCR_IRQ_MASK         (STM32_MDMA_CCR_TCIE \
91                                         | STM32_MDMA_CCR_BTIE \
92                                         | STM32_MDMA_CCR_BRTIE \
93                                         | STM32_MDMA_CCR_CTCIE \
94                                         | STM32_MDMA_CCR_TEIE)
95
96 /* MDMA Channel x transfer configuration register */
97 #define STM32_MDMA_CTCR(x)              (0x50 + 0x40 * (x))
98 #define STM32_MDMA_CTCR_BWM             BIT(31)
99 #define STM32_MDMA_CTCR_SWRM            BIT(30)
100 #define STM32_MDMA_CTCR_TRGM_MSK        GENMASK(29, 28)
101 #define STM32_MDMA_CTCR_TRGM(n)         STM32_MDMA_SET((n), \
102                                                        STM32_MDMA_CTCR_TRGM_MSK)
103 #define STM32_MDMA_CTCR_TRGM_GET(n)     STM32_MDMA_GET((n), \
104                                                        STM32_MDMA_CTCR_TRGM_MSK)
105 #define STM32_MDMA_CTCR_PAM_MASK        GENMASK(27, 26)
106 #define STM32_MDMA_CTCR_PAM(n)          STM32_MDMA_SET(n, \
107                                                        STM32_MDMA_CTCR_PAM_MASK)
108 #define STM32_MDMA_CTCR_PKE             BIT(25)
109 #define STM32_MDMA_CTCR_TLEN_MSK        GENMASK(24, 18)
110 #define STM32_MDMA_CTCR_TLEN(n)         STM32_MDMA_SET((n), \
111                                                        STM32_MDMA_CTCR_TLEN_MSK)
112 #define STM32_MDMA_CTCR_TLEN_GET(n)     STM32_MDMA_GET((n), \
113                                                        STM32_MDMA_CTCR_TLEN_MSK)
114 #define STM32_MDMA_CTCR_LEN2_MSK        GENMASK(25, 18)
115 #define STM32_MDMA_CTCR_LEN2(n)         STM32_MDMA_SET((n), \
116                                                        STM32_MDMA_CTCR_LEN2_MSK)
117 #define STM32_MDMA_CTCR_LEN2_GET(n)     STM32_MDMA_GET((n), \
118                                                        STM32_MDMA_CTCR_LEN2_MSK)
119 #define STM32_MDMA_CTCR_DBURST_MASK     GENMASK(17, 15)
120 #define STM32_MDMA_CTCR_DBURST(n)       STM32_MDMA_SET(n, \
121                                                     STM32_MDMA_CTCR_DBURST_MASK)
122 #define STM32_MDMA_CTCR_SBURST_MASK     GENMASK(14, 12)
123 #define STM32_MDMA_CTCR_SBURST(n)       STM32_MDMA_SET(n, \
124                                                     STM32_MDMA_CTCR_SBURST_MASK)
125 #define STM32_MDMA_CTCR_DINCOS_MASK     GENMASK(11, 10)
126 #define STM32_MDMA_CTCR_DINCOS(n)       STM32_MDMA_SET((n), \
127                                                     STM32_MDMA_CTCR_DINCOS_MASK)
128 #define STM32_MDMA_CTCR_SINCOS_MASK     GENMASK(9, 8)
129 #define STM32_MDMA_CTCR_SINCOS(n)       STM32_MDMA_SET((n), \
130                                                     STM32_MDMA_CTCR_SINCOS_MASK)
131 #define STM32_MDMA_CTCR_DSIZE_MASK      GENMASK(7, 6)
132 #define STM32_MDMA_CTCR_DSIZE(n)        STM32_MDMA_SET(n, \
133                                                      STM32_MDMA_CTCR_DSIZE_MASK)
134 #define STM32_MDMA_CTCR_SSIZE_MASK      GENMASK(5, 4)
135 #define STM32_MDMA_CTCR_SSIZE(n)        STM32_MDMA_SET(n, \
136                                                      STM32_MDMA_CTCR_SSIZE_MASK)
137 #define STM32_MDMA_CTCR_DINC_MASK       GENMASK(3, 2)
138 #define STM32_MDMA_CTCR_DINC(n)         STM32_MDMA_SET((n), \
139                                                       STM32_MDMA_CTCR_DINC_MASK)
140 #define STM32_MDMA_CTCR_SINC_MASK       GENMASK(1, 0)
141 #define STM32_MDMA_CTCR_SINC(n)         STM32_MDMA_SET((n), \
142                                                       STM32_MDMA_CTCR_SINC_MASK)
143 #define STM32_MDMA_CTCR_CFG_MASK        (STM32_MDMA_CTCR_SINC_MASK \
144                                         | STM32_MDMA_CTCR_DINC_MASK \
145                                         | STM32_MDMA_CTCR_SINCOS_MASK \
146                                         | STM32_MDMA_CTCR_DINCOS_MASK \
147                                         | STM32_MDMA_CTCR_LEN2_MSK \
148                                         | STM32_MDMA_CTCR_TRGM_MSK)
149
150 /* MDMA Channel x block number of data register */
151 #define STM32_MDMA_CBNDTR(x)            (0x54 + 0x40 * (x))
152 #define STM32_MDMA_CBNDTR_BRC_MK        GENMASK(31, 20)
153 #define STM32_MDMA_CBNDTR_BRC(n)        STM32_MDMA_SET(n, \
154                                                        STM32_MDMA_CBNDTR_BRC_MK)
155 #define STM32_MDMA_CBNDTR_BRC_GET(n)    STM32_MDMA_GET((n), \
156                                                        STM32_MDMA_CBNDTR_BRC_MK)
157
158 #define STM32_MDMA_CBNDTR_BRDUM         BIT(19)
159 #define STM32_MDMA_CBNDTR_BRSUM         BIT(18)
160 #define STM32_MDMA_CBNDTR_BNDT_MASK     GENMASK(16, 0)
161 #define STM32_MDMA_CBNDTR_BNDT(n)       STM32_MDMA_SET(n, \
162                                                     STM32_MDMA_CBNDTR_BNDT_MASK)
163
164 /* MDMA Channel x source address register */
165 #define STM32_MDMA_CSAR(x)              (0x58 + 0x40 * (x))
166
167 /* MDMA Channel x destination address register */
168 #define STM32_MDMA_CDAR(x)              (0x5C + 0x40 * (x))
169
170 /* MDMA Channel x block repeat address update register */
171 #define STM32_MDMA_CBRUR(x)             (0x60 + 0x40 * (x))
172 #define STM32_MDMA_CBRUR_DUV_MASK       GENMASK(31, 16)
173 #define STM32_MDMA_CBRUR_DUV(n)         STM32_MDMA_SET(n, \
174                                                       STM32_MDMA_CBRUR_DUV_MASK)
175 #define STM32_MDMA_CBRUR_SUV_MASK       GENMASK(15, 0)
176 #define STM32_MDMA_CBRUR_SUV(n)         STM32_MDMA_SET(n, \
177                                                       STM32_MDMA_CBRUR_SUV_MASK)
178
179 /* MDMA Channel x link address register */
180 #define STM32_MDMA_CLAR(x)              (0x64 + 0x40 * (x))
181
182 /* MDMA Channel x trigger and bus selection register */
183 #define STM32_MDMA_CTBR(x)              (0x68 + 0x40 * (x))
184 #define STM32_MDMA_CTBR_DBUS            BIT(17)
185 #define STM32_MDMA_CTBR_SBUS            BIT(16)
186 #define STM32_MDMA_CTBR_TSEL_MASK       GENMASK(5, 0)
187 #define STM32_MDMA_CTBR_TSEL(n)         STM32_MDMA_SET(n, \
188                                                       STM32_MDMA_CTBR_TSEL_MASK)
189
190 /* MDMA Channel x mask address register */
191 #define STM32_MDMA_CMAR(x)              (0x70 + 0x40 * (x))
192
193 /* MDMA Channel x mask data register */
194 #define STM32_MDMA_CMDR(x)              (0x74 + 0x40 * (x))
195
196 #define STM32_MDMA_MAX_BUF_LEN          128
197 #define STM32_MDMA_MAX_BLOCK_LEN        65536
198 #define STM32_MDMA_MAX_CHANNELS         32
199 #define STM32_MDMA_MAX_REQUESTS         256
200 #define STM32_MDMA_MAX_BURST            128
201 #define STM32_MDMA_VERY_HIGH_PRIORITY   0x11
202
/*
 * Transfer granularity per hardware request; the four values match the
 * 2-bit CTCR TRGM field encoding (see STM32_MDMA_CTCR_TRGM).
 */
enum stm32_mdma_trigger_mode {
	STM32_MDMA_BUFFER,
	STM32_MDMA_BLOCK,
	STM32_MDMA_BLOCK_REP,
	STM32_MDMA_LINKED_LIST,
};
209
/*
 * Bus width encoding used by the SSIZE/DSIZE and SINCOS/DINCOS fields:
 * the value is log2 of the access size in bytes (0 = 1 byte .. 3 = 8
 * bytes), matching what stm32_mdma_get_width() computes via ffs().
 */
enum stm32_mdma_width {
	STM32_MDMA_BYTE,
	STM32_MDMA_HALF_WORD,
	STM32_MDMA_WORD,
	STM32_MDMA_DOUBLE_WORD,
};
216
/*
 * Address increment mode for the SINC/DINC fields: 0 = fixed address,
 * 2 = increment, 3 = decrement (value 1 is deliberately skipped, hence
 * the explicit initializers).
 */
enum stm32_mdma_inc_mode {
	STM32_MDMA_FIXED = 0,
	STM32_MDMA_INC = 2,
	STM32_MDMA_DEC = 3,
};
222
/* Per-channel request configuration (consumed when building transfers). */
struct stm32_mdma_chan_config {
	u32 request;		/* HW request line, programmed in CTBR TSEL */
	u32 priority_level;	/* channel priority, programmed in CCR PL */
	u32 transfer_config;	/* CTCR bits kept via STM32_MDMA_CTCR_CFG_MASK */
	u32 mask_addr;		/* address stored in CMAR */
	u32 mask_data;		/* value stored in CMDR */
};
230
/*
 * Hardware linked-list node: in-memory image of the per-channel
 * registers loaded by the MDMA engine. The field order mirrors the
 * register offsets (CTCR at 0x50 ... CMDR at 0x74); @dummy pads the
 * unused word at offset 0x6C between CTBR (0x68) and CMAR (0x70).
 * Do not reorder or resize fields.
 */
struct stm32_mdma_hwdesc {
	u32 ctcr;	/* transfer configuration */
	u32 cbndtr;	/* block number of data */
	u32 csar;	/* source address */
	u32 cdar;	/* destination address */
	u32 cbrur;	/* block repeat address update */
	u32 clar;	/* link address of next node; 0 ends the list */
	u32 ctbr;	/* trigger and bus selection */
	u32 dummy;	/* register hole at 0x6C */
	u32 cmar;	/* mask address */
	u32 cmdr;	/* mask data */
} __aligned(64);
243
/* One list node: CPU pointer plus the bus address the engine follows. */
struct stm32_mdma_desc_node {
	struct stm32_mdma_hwdesc *hwdesc;	/* CPU view, from the channel's DMA pool */
	dma_addr_t hwdesc_phys;			/* bus address, written into CLAR links */
};
248
/* Software descriptor covering @count hardware linked-list nodes. */
struct stm32_mdma_desc {
	struct virt_dma_desc vdesc;	/* virt-dma framework bookkeeping */
	u32 ccr;			/* CCR image to program when started */
	bool cyclic;			/* cyclic transfer flag */
	u32 count;			/* number of valid entries in node[] */
	struct stm32_mdma_desc_node node[];	/* flexible array of hw nodes */
};
256
/* Driver state for one MDMA channel. */
struct stm32_mdma_chan {
	struct virt_dma_chan vchan;		/* virt-dma channel core */
	struct dma_pool *desc_pool;		/* pool of stm32_mdma_hwdesc nodes */
	u32 id;					/* channel index x in the register macros */
	struct stm32_mdma_desc *desc;		/* descriptor currently in flight */
	u32 curr_hwdesc;			/* index of the active hw node */
	struct dma_slave_config dma_config;	/* client-provided slave config */
	struct stm32_mdma_chan_config chan_config; /* per-channel request config */
	bool busy;				/* transfer in progress */
	u32 mem_burst;				/* selected memory-side burst */
	u32 mem_width;				/* selected memory-side bus width */
};
269
/* Driver state for the whole MDMA controller. */
struct stm32_mdma_device {
	struct dma_device ddev;		/* dmaengine device registration */
	void __iomem *base;		/* mapped register base */
	struct clk *clk;		/* controller clock */
	int irq;			/* controller interrupt line */
	u32 nr_channels;		/* number of channels in use */
	u32 nr_requests;		/* number of HW request lines */
	u32 nr_ahb_addr_masks;		/* valid entries in ahb_addr_masks[] */
	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
	u32 ahb_addr_masks[];		/* high address nibbles mapped on AHB */
};
281
282 static struct stm32_mdma_device *stm32_mdma_get_dev(
283         struct stm32_mdma_chan *chan)
284 {
285         return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
286                             ddev);
287 }
288
289 static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
290 {
291         return container_of(c, struct stm32_mdma_chan, vchan.chan);
292 }
293
/* Convert a virt-dma descriptor to the driver's descriptor wrapper. */
static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
}
298
299 static struct device *chan2dev(struct stm32_mdma_chan *chan)
300 {
301         return &chan->vchan.chan.dev->device;
302 }
303
304 static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
305 {
306         return mdma_dev->ddev.dev;
307 }
308
309 static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
310 {
311         return readl_relaxed(dmadev->base + reg);
312 }
313
314 static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
315 {
316         writel_relaxed(val, dmadev->base + reg);
317 }
318
319 static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
320                                 u32 mask)
321 {
322         void __iomem *addr = dmadev->base + reg;
323
324         writel_relaxed(readl_relaxed(addr) | mask, addr);
325 }
326
327 static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
328                                 u32 mask)
329 {
330         void __iomem *addr = dmadev->base + reg;
331
332         writel_relaxed(readl_relaxed(addr) & ~mask, addr);
333 }
334
335 static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
336                 struct stm32_mdma_chan *chan, u32 count)
337 {
338         struct stm32_mdma_desc *desc;
339         int i;
340
341         desc = kzalloc(offsetof(typeof(*desc), node[count]), GFP_NOWAIT);
342         if (!desc)
343                 return NULL;
344
345         for (i = 0; i < count; i++) {
346                 desc->node[i].hwdesc =
347                         dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
348                                        &desc->node[i].hwdesc_phys);
349                 if (!desc->node[i].hwdesc)
350                         goto err;
351         }
352
353         desc->count = count;
354
355         return desc;
356
357 err:
358         dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
359         while (--i >= 0)
360                 dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
361                               desc->node[i].hwdesc_phys);
362         kfree(desc);
363         return NULL;
364 }
365
366 static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
367 {
368         struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
369         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
370         int i;
371
372         for (i = 0; i < desc->count; i++)
373                 dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
374                               desc->node[i].hwdesc_phys);
375         kfree(desc);
376 }
377
378 static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
379                                 enum dma_slave_buswidth width)
380 {
381         switch (width) {
382         case DMA_SLAVE_BUSWIDTH_1_BYTE:
383         case DMA_SLAVE_BUSWIDTH_2_BYTES:
384         case DMA_SLAVE_BUSWIDTH_4_BYTES:
385         case DMA_SLAVE_BUSWIDTH_8_BYTES:
386                 return ffs(width) - 1;
387         default:
388                 dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
389                         width);
390                 return -EINVAL;
391         }
392 }
393
394 static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
395                                                         u32 buf_len, u32 tlen)
396 {
397         enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
398
399         for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
400              max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
401              max_width >>= 1) {
402                 /*
403                  * Address and buffer length both have to be aligned on
404                  * bus width
405                  */
406                 if ((((buf_len | addr) & (max_width - 1)) == 0) &&
407                     tlen >= max_width)
408                         break;
409         }
410
411         return max_width;
412 }
413
414 static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
415                                      enum dma_slave_buswidth width)
416 {
417         u32 best_burst;
418
419         best_burst = min((u32)1 << __ffs(tlen | buf_len),
420                          max_burst * width) / width;
421
422         return (best_burst > 0) ? best_burst : 1;
423 }
424
/*
 * Disable an MDMA channel: mask its interrupts, clear the enable bit,
 * then poll CISR until the hardware reports transfer completion.
 *
 * Returns 0 on success, -EBUSY if the channel did not finish within
 * the 1 ms polling window. Safe in atomic context (uses the _atomic
 * poll variant).
 */
static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 ccr, cisr, id, reg;
	int ret;

	id = chan->id;
	reg = STM32_MDMA_CCR(id);

	/* Disable interrupts */
	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

	/* Only wait for completion if the channel was actually enabled. */
	ccr = stm32_mdma_read(dmadev, reg);
	if (ccr & STM32_MDMA_CCR_EN) {
		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

		/* Ensure that any ongoing transfer has been completed */
		ret = readl_relaxed_poll_timeout_atomic(
				dmadev->base + STM32_MDMA_CISR(id), cisr,
				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
		if (ret) {
			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
			return -EBUSY;
		}
	}

	return 0;
}
453
454 static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
455 {
456         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
457         u32 status;
458         int ret;
459
460         /* Disable DMA */
461         ret = stm32_mdma_disable_chan(chan);
462         if (ret < 0)
463                 return;
464
465         /* Clear interrupt status if it is there */
466         status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
467         if (status) {
468                 dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
469                         __func__, status);
470                 stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
471         }
472
473         chan->busy = false;
474 }
475
476 static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
477                                u32 ctbr_mask, u32 src_addr)
478 {
479         u32 mask;
480         int i;
481
482         /* Check if memory device is on AHB or AXI */
483         *ctbr &= ~ctbr_mask;
484         mask = src_addr & 0xF0000000;
485         for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
486                 if (mask == dmadev->ahb_addr_masks[i]) {
487                         *ctbr |= ctbr_mask;
488                         break;
489                 }
490         }
491 }
492
493 static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
494                                      enum dma_transfer_direction direction,
495                                      u32 *mdma_ccr, u32 *mdma_ctcr,
496                                      u32 *mdma_ctbr, dma_addr_t addr,
497                                      u32 buf_len)
498 {
499         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
500         struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
501         enum dma_slave_buswidth src_addr_width, dst_addr_width;
502         phys_addr_t src_addr, dst_addr;
503         int src_bus_width, dst_bus_width;
504         u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
505         u32 ccr, ctcr, ctbr, tlen;
506
507         src_addr_width = chan->dma_config.src_addr_width;
508         dst_addr_width = chan->dma_config.dst_addr_width;
509         src_maxburst = chan->dma_config.src_maxburst;
510         dst_maxburst = chan->dma_config.dst_maxburst;
511
512         ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
513         ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
514         ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
515
516         /* Enable HW request mode */
517         ctcr &= ~STM32_MDMA_CTCR_SWRM;
518
519         /* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieve from DT */
520         ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
521         ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;
522
523         /*
524          * For buffer transfer length (TLEN) we have to set
525          * the number of bytes - 1 in CTCR register
526          */
527         tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
528         ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
529         ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));
530
531         /* Disable Pack Enable */
532         ctcr &= ~STM32_MDMA_CTCR_PKE;
533
534         /* Check burst size constraints */
535         if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
536             dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
537                 dev_err(chan2dev(chan),
538                         "burst size * bus width higher than %d bytes\n",
539                         STM32_MDMA_MAX_BURST);
540                 return -EINVAL;
541         }
542
543         if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
544             (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
545                 dev_err(chan2dev(chan), "burst size must be a power of 2\n");
546                 return -EINVAL;
547         }
548
549         /*
550          * Configure channel control:
551          * - Clear SW request as in this case this is a HW one
552          * - Clear WEX, HEX and BEX bits
553          * - Set priority level
554          */
555         ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
556                  STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
557         ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);
558
559         /* Configure Trigger selection */
560         ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
561         ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);
562
563         switch (direction) {
564         case DMA_MEM_TO_DEV:
565                 dst_addr = chan->dma_config.dst_addr;
566
567                 /* Set device data size */
568                 dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
569                 if (dst_bus_width < 0)
570                         return dst_bus_width;
571                 ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
572                 ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);
573
574                 /* Set device burst value */
575                 dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
576                                                            dst_maxburst,
577                                                            dst_addr_width);
578                 chan->mem_burst = dst_best_burst;
579                 ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
580                 ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
581
582                 /* Set memory data size */
583                 src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
584                 chan->mem_width = src_addr_width;
585                 src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
586                 if (src_bus_width < 0)
587                         return src_bus_width;
588                 ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK |
589                         STM32_MDMA_CTCR_SINCOS_MASK;
590                 ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
591                         STM32_MDMA_CTCR_SINCOS(src_bus_width);
592
593                 /* Set memory burst value */
594                 src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
595                 src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
596                                                            src_maxburst,
597                                                            src_addr_width);
598                 chan->mem_burst = src_best_burst;
599                 ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
600                 ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
601
602                 /* Select bus */
603                 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
604                                    dst_addr);
605
606                 if (dst_bus_width != src_bus_width)
607                         ctcr |= STM32_MDMA_CTCR_PKE;
608
609                 /* Set destination address */
610                 stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
611                 break;
612
613         case DMA_DEV_TO_MEM:
614                 src_addr = chan->dma_config.src_addr;
615
616                 /* Set device data size */
617                 src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
618                 if (src_bus_width < 0)
619                         return src_bus_width;
620                 ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
621                 ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);
622
623                 /* Set device burst value */
624                 src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
625                                                            src_maxburst,
626                                                            src_addr_width);
627                 ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
628                 ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));
629
630                 /* Set memory data size */
631                 dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
632                 chan->mem_width = dst_addr_width;
633                 dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
634                 if (dst_bus_width < 0)
635                         return dst_bus_width;
636                 ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
637                         STM32_MDMA_CTCR_DINCOS_MASK);
638                 ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
639                         STM32_MDMA_CTCR_DINCOS(dst_bus_width);
640
641                 /* Set memory burst value */
642                 dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
643                 dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
644                                                            dst_maxburst,
645                                                            dst_addr_width);
646                 ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
647                 ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));
648
649                 /* Select bus */
650                 stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
651                                    src_addr);
652
653                 if (dst_bus_width != src_bus_width)
654                         ctcr |= STM32_MDMA_CTCR_PKE;
655
656                 /* Set source address */
657                 stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
658                 break;
659
660         default:
661                 dev_err(chan2dev(chan), "Dma direction is not supported\n");
662                 return -EINVAL;
663         }
664
665         *mdma_ccr = ccr;
666         *mdma_ctcr = ctcr;
667         *mdma_ctbr = ctbr;
668
669         return 0;
670 }
671
672 static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
673                                    struct stm32_mdma_desc_node *node)
674 {
675         dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
676         dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
677         dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
678         dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
679         dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
680         dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
681         dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
682         dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
683         dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
684         dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
685 }
686
687 static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
688                                     struct stm32_mdma_desc *desc,
689                                     enum dma_transfer_direction dir, u32 count,
690                                     dma_addr_t src_addr, dma_addr_t dst_addr,
691                                     u32 len, u32 ctcr, u32 ctbr, bool is_last,
692                                     bool is_first, bool is_cyclic)
693 {
694         struct stm32_mdma_chan_config *config = &chan->chan_config;
695         struct stm32_mdma_hwdesc *hwdesc;
696         u32 next = count + 1;
697
698         hwdesc = desc->node[count].hwdesc;
699         hwdesc->ctcr = ctcr;
700         hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
701                         STM32_MDMA_CBNDTR_BRDUM |
702                         STM32_MDMA_CBNDTR_BRSUM |
703                         STM32_MDMA_CBNDTR_BNDT_MASK);
704         hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
705         hwdesc->csar = src_addr;
706         hwdesc->cdar = dst_addr;
707         hwdesc->cbrur = 0;
708         hwdesc->ctbr = ctbr;
709         hwdesc->cmar = config->mask_addr;
710         hwdesc->cmdr = config->mask_data;
711
712         if (is_last) {
713                 if (is_cyclic)
714                         hwdesc->clar = desc->node[0].hwdesc_phys;
715                 else
716                         hwdesc->clar = 0;
717         } else {
718                 hwdesc->clar = desc->node[next].hwdesc_phys;
719         }
720
721         stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
722 }
723
724 static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
725                                  struct stm32_mdma_desc *desc,
726                                  struct scatterlist *sgl, u32 sg_len,
727                                  enum dma_transfer_direction direction)
728 {
729         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
730         struct dma_slave_config *dma_config = &chan->dma_config;
731         struct scatterlist *sg;
732         dma_addr_t src_addr, dst_addr;
733         u32 ccr, ctcr, ctbr;
734         int i, ret = 0;
735
736         for_each_sg(sgl, sg, sg_len, i) {
737                 if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
738                         dev_err(chan2dev(chan), "Invalid block len\n");
739                         return -EINVAL;
740                 }
741
742                 if (direction == DMA_MEM_TO_DEV) {
743                         src_addr = sg_dma_address(sg);
744                         dst_addr = dma_config->dst_addr;
745                         ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
746                                                         &ctcr, &ctbr, src_addr,
747                                                         sg_dma_len(sg));
748                         stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
749                                            src_addr);
750                 } else {
751                         src_addr = dma_config->src_addr;
752                         dst_addr = sg_dma_address(sg);
753                         ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
754                                                         &ctcr, &ctbr, dst_addr,
755                                                         sg_dma_len(sg));
756                         stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
757                                            dst_addr);
758                 }
759
760                 if (ret < 0)
761                         return ret;
762
763                 stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
764                                         dst_addr, sg_dma_len(sg), ctcr, ctbr,
765                                         i == sg_len - 1, i == 0, false);
766         }
767
768         /* Enable interrupts */
769         ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
770         ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
771         if (sg_len > 1)
772                 ccr |= STM32_MDMA_CCR_BTIE;
773         desc->ccr = ccr;
774
775         return 0;
776 }
777
778 static struct dma_async_tx_descriptor *
779 stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
780                          u32 sg_len, enum dma_transfer_direction direction,
781                          unsigned long flags, void *context)
782 {
783         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
784         struct stm32_mdma_desc *desc;
785         int i, ret;
786
787         /*
788          * Once DMA is in setup cyclic mode the channel we cannot assign this
789          * channel anymore. The DMA channel needs to be aborted or terminated
790          * for allowing another request.
791          */
792         if (chan->desc && chan->desc->cyclic) {
793                 dev_err(chan2dev(chan),
794                         "Request not allowed when dma in cyclic mode\n");
795                 return NULL;
796         }
797
798         desc = stm32_mdma_alloc_desc(chan, sg_len);
799         if (!desc)
800                 return NULL;
801
802         ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
803         if (ret < 0)
804                 goto xfer_setup_err;
805
806         desc->cyclic = false;
807
808         return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
809
810 xfer_setup_err:
811         for (i = 0; i < desc->count; i++)
812                 dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
813                               desc->node[i].hwdesc_phys);
814         kfree(desc);
815         return NULL;
816 }
817
/*
 * Prepare a cyclic transfer: buf_len is split into buf_len / period_len
 * periods, one hardware descriptor per period, chained into a ring (the
 * last node links back to node 0 via stm32_mdma_setup_hwdesc()).
 */
static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
			   size_t buf_len, size_t period_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct stm32_mdma_desc *desc;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr, count;
	int i, ret;

	/*
	 * Once the channel is set up in cyclic mode it cannot take a new
	 * request: the transfer must first be aborted or terminated.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	/* Each period is one hardware block, bounded by the BNDT field */
	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	count = buf_len / period_len;

	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Select bus */
	if (direction == DMA_MEM_TO_DEV) {
		src_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, src_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);
	} else {
		dst_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, dst_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);
	}

	if (ret < 0)
		goto xfer_setup_err;

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	/* Configure hwdesc list */
	for (i = 0; i < count; i++) {
		if (direction == DMA_MEM_TO_DEV) {
			src_addr = buf_addr + i * period_len;
			dst_addr = dma_config->dst_addr;
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = buf_addr + i * period_len;
		}

		/* is_cyclic=true so the last node points back to node 0 */
		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, period_len, ctcr, ctbr,
					i == count - 1, i == 0, true);
	}

	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	/* Return every hardware descriptor to the pool before freeing desc */
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}
908
/*
 * Prepare a memory-to-memory copy. Short copies use a single buffer or
 * block transfer; anything above STM32_MDMA_MAX_BLOCK_LEN is split into a
 * linked list of hardware descriptors. MEM2MEM uses software request mode
 * (SWRM), triggered by stm32_mdma_start_transfer().
 */
static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
			   size_t len, unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	enum dma_slave_buswidth max_width;
	struct stm32_mdma_desc *desc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
	u32 best_burst, tlen;
	size_t xfer_count, offset;
	int src_bus_width, dst_bus_width;
	int i;

	/*
	 * Once the channel is set up in cyclic mode it cannot take a new
	 * request: the transfer must first be aborted or terminated.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	/* One hardware descriptor per STM32_MDMA_MAX_BLOCK_LEN chunk */
	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Start from the current register values, with the channel disabled */
	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & ~STM32_MDMA_CCR_EN;
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

	/* Enable sw req, some interrupts and clear other bits */
	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
		 STM32_MDMA_CCR_IRQ_MASK);
	ccr |= STM32_MDMA_CCR_TEIE;

	/* Enable SW request mode, dest/src inc and clear other bits */
	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
		  STM32_MDMA_CTCR_SINC_MASK);
	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

	/* Reset HW request */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

	/* Select bus */
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

	/* Clear CBNDTR registers */
	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
		/* The whole copy fits into a single hardware descriptor */
		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
		if (len <= STM32_MDMA_MAX_BUF_LEN) {
			/* Setup a buffer transfer */
			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
		} else {
			/* Setup a block transfer */
			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
		}

		tlen = STM32_MDMA_MAX_BUF_LEN;
		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

		/* Set source best burst size */
		max_width = stm32_mdma_get_max_width(src, len, tlen);
		src_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set destination best burst size */
		max_width = stm32_mdma_get_max_width(dest, len, tlen);
		dst_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Enable packing/unpacking when bus widths differ */
		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Prepare hardware descriptor */
		hwdesc = desc->node[0].hwdesc;
		hwdesc->ctcr = ctcr;
		hwdesc->cbndtr = cbndtr;
		hwdesc->csar = src;
		hwdesc->cdar = dest;
		hwdesc->cbrur = 0;
		hwdesc->clar = 0;
		hwdesc->ctbr = ctbr;
		hwdesc->cmar = 0;
		hwdesc->cmdr = 0;

		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	} else {
		/* Setup a LLI transfer */
		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
		tlen = STM32_MDMA_MAX_BUF_LEN;

		for (i = 0, offset = 0; offset < len;
		     i++, offset += xfer_count) {
			xfer_count = min_t(size_t, len - offset,
					   STM32_MDMA_MAX_BLOCK_LEN);

			/*
			 * Set source best burst size.
			 * NOTE(review): width/burst are derived from the total
			 * len, not from this chunk's xfer_count — presumably
			 * intentional since chunks share one CTCR; confirm.
			 */
			max_width = stm32_mdma_get_max_width(src, len, tlen);
			src_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
				STM32_MDMA_CTCR_SINCOS(src_bus_width);

			/* Set destination best burst size */
			max_width = stm32_mdma_get_max_width(dest, len, tlen);
			dst_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
				STM32_MDMA_CTCR_DINCOS(dst_bus_width);

			/* Enable packing/unpacking when bus widths differ */
			if (dst_bus_width != src_bus_width)
				ctcr |= STM32_MDMA_CTCR_PKE;

			/* Prepare hardware descriptor */
			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
						src + offset, dest + offset,
						xfer_count, ctcr, ctbr,
						i == count - 1, i == 0, false);
		}
	}

	desc->ccr = ccr;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}
1087
/* Dump all channel registers via dev_dbg (visible with dynamic debug only) */
static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
}
1113
/*
 * Pop the next queued descriptor and program it into the channel.
 * Must be called with the vchan lock held (callers: issue_pending,
 * xfer_end from the IRQ handler).
 */
static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 id = chan->id;
	u32 status, reg;

	/* Nothing queued: leave the channel idle */
	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc) {
		chan->desc = NULL;
		return;
	}

	list_del(&vdesc->node);

	chan->desc = to_stm32_mdma_desc(vdesc);
	hwdesc = chan->desc->node[0].hwdesc;
	chan->curr_hwdesc = 0;

	/* Program every channel register from the first hardware descriptor */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

	stm32_mdma_dump_reg(chan);

	/* Start DMA */
	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
		reg = STM32_MDMA_CCR(id);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
	}

	chan->busy = true;

	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}
1165
1166 static void stm32_mdma_issue_pending(struct dma_chan *c)
1167 {
1168         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1169         unsigned long flags;
1170
1171         spin_lock_irqsave(&chan->vchan.lock, flags);
1172
1173         if (!vchan_issue_pending(&chan->vchan))
1174                 goto end;
1175
1176         dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);
1177
1178         if (!chan->desc && !chan->busy)
1179                 stm32_mdma_start_transfer(chan);
1180
1181 end:
1182         spin_unlock_irqrestore(&chan->vchan.lock, flags);
1183 }
1184
1185 static int stm32_mdma_pause(struct dma_chan *c)
1186 {
1187         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1188         unsigned long flags;
1189         int ret;
1190
1191         spin_lock_irqsave(&chan->vchan.lock, flags);
1192         ret = stm32_mdma_disable_chan(chan);
1193         spin_unlock_irqrestore(&chan->vchan.lock, flags);
1194
1195         if (!ret)
1196                 dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);
1197
1198         return ret;
1199 }
1200
/*
 * dmaengine resume callback: re-enable a previously paused channel,
 * restarting from the current hardware descriptor.
 */
static int stm32_mdma_resume(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	unsigned long flags;
	u32 status, reg;

	/* Transfer can be terminated */
	if (!chan->desc || (stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)) & STM32_MDMA_CCR_EN))
		return -EPERM;

	/*
	 * NOTE(review): chan->desc/curr_hwdesc are read before taking
	 * vchan.lock — presumably safe because pause/resume are serialized
	 * by the dmaengine core; confirm.
	 */
	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	/* Re-configure control register */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);

	stm32_mdma_dump_reg(chan);

	/* Re-start DMA */
	reg = STM32_MDMA_CCR(chan->id);
	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

	return 0;
}
1241
/* dmaengine terminate_all callback: abort the running transfer and free
 * every queued descriptor (the free happens outside the lock).
 */
static int stm32_mdma_terminate_all(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (chan->desc) {
		/* Detach the in-flight descriptor, then stop the hardware */
		vchan_terminate_vdesc(&chan->desc->vdesc);
		if (chan->busy)
			stm32_mdma_stop(chan);
		chan->desc = NULL;
	}
	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}
1262
1263 static void stm32_mdma_synchronize(struct dma_chan *c)
1264 {
1265         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1266
1267         vchan_synchronize(&chan->vchan);
1268 }
1269
1270 static int stm32_mdma_slave_config(struct dma_chan *c,
1271                                    struct dma_slave_config *config)
1272 {
1273         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1274
1275         memcpy(&chan->dma_config, config, sizeof(*config));
1276
1277         return 0;
1278 }
1279
1280 static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
1281                                       struct stm32_mdma_desc *desc,
1282                                       u32 curr_hwdesc)
1283 {
1284         struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1285         struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc;
1286         u32 cbndtr, residue, modulo, burst_size;
1287         int i;
1288
1289         residue = 0;
1290         for (i = curr_hwdesc + 1; i < desc->count; i++) {
1291                 hwdesc = desc->node[i].hwdesc;
1292                 residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
1293         }
1294         cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
1295         residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
1296
1297         if (!chan->mem_burst)
1298                 return residue;
1299
1300         burst_size = chan->mem_burst * chan->mem_width;
1301         modulo = residue % burst_size;
1302         if (modulo)
1303                 residue = residue - modulo + burst_size;
1304
1305         return residue;
1306 }
1307
1308 static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
1309                                             dma_cookie_t cookie,
1310                                             struct dma_tx_state *state)
1311 {
1312         struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1313         struct virt_dma_desc *vdesc;
1314         enum dma_status status;
1315         unsigned long flags;
1316         u32 residue = 0;
1317
1318         status = dma_cookie_status(c, cookie, state);
1319         if ((status == DMA_COMPLETE) || (!state))
1320                 return status;
1321
1322         spin_lock_irqsave(&chan->vchan.lock, flags);
1323
1324         vdesc = vchan_find_desc(&chan->vchan, cookie);
1325         if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
1326                 residue = stm32_mdma_desc_residue(chan, chan->desc,
1327                                                   chan->curr_hwdesc);
1328         else if (vdesc)
1329                 residue = stm32_mdma_desc_residue(chan,
1330                                                   to_stm32_mdma_desc(vdesc), 0);
1331         dma_set_residue(state, residue);
1332
1333         spin_unlock_irqrestore(&chan->vchan.lock, flags);
1334
1335         return status;
1336 }
1337
/* Complete the finished descriptor, mark the channel idle and chain the
 * next queued transfer. Called from the IRQ handler with vchan.lock held.
 */
static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
{
	vchan_cookie_complete(&chan->desc->vdesc);
	chan->desc = NULL;
	chan->busy = false;

	/* Start the next transfer if this driver has a next desc */
	stm32_mdma_start_transfer(chan);
}
1347
1348 static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
1349 {
1350         struct stm32_mdma_device *dmadev = devid;
1351         struct stm32_mdma_chan *chan;
1352         u32 reg, id, ccr, ien, status;
1353
1354         /* Find out which channel generates the interrupt */
1355         status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1356         if (!status) {
1357                 dev_dbg(mdma2dev(dmadev), "spurious it\n");
1358                 return IRQ_NONE;
1359         }
1360         id = __ffs(status);
1361
1362         chan = &dmadev->chan[id];
1363         if (!chan) {
1364                 dev_warn(mdma2dev(dmadev), "MDMA channel not initialized\n");
1365                 return IRQ_NONE;
1366         }
1367
1368         /* Handle interrupt for the channel */
1369         spin_lock(&chan->vchan.lock);
1370         status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
1371         /* Mask Channel ReQuest Active bit which can be set in case of MEM2MEM */
1372         status &= ~STM32_MDMA_CISR_CRQA;
1373         ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
1374         ien = (ccr & STM32_MDMA_CCR_IRQ_MASK) >> 1;
1375
1376         if (!(status & ien)) {
1377                 spin_unlock(&chan->vchan.lock);
1378                 dev_warn(chan2dev(chan),
1379                          "spurious it (status=0x%04x, ien=0x%04x)\n",
1380                          status, ien);
1381                 return IRQ_NONE;
1382         }
1383
1384         reg = STM32_MDMA_CIFCR(id);
1385
1386         if (status & STM32_MDMA_CISR_TEIF) {
1387                 dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n",
1388                         readl_relaxed(dmadev->base + STM32_MDMA_CESR(id)));
1389                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1390                 status &= ~STM32_MDMA_CISR_TEIF;
1391         }
1392
1393         if (status & STM32_MDMA_CISR_CTCIF) {
1394                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1395                 status &= ~STM32_MDMA_CISR_CTCIF;
1396                 stm32_mdma_xfer_end(chan);
1397         }
1398
1399         if (status & STM32_MDMA_CISR_BRTIF) {
1400                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1401                 status &= ~STM32_MDMA_CISR_BRTIF;
1402         }
1403
1404         if (status & STM32_MDMA_CISR_BTIF) {
1405                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
1406                 status &= ~STM32_MDMA_CISR_BTIF;
1407                 chan->curr_hwdesc++;
1408                 if (chan->desc && chan->desc->cyclic) {
1409                         if (chan->curr_hwdesc == chan->desc->count)
1410                                 chan->curr_hwdesc = 0;
1411                         vchan_cyclic_callback(&chan->desc->vdesc);
1412                 }
1413         }
1414
1415         if (status & STM32_MDMA_CISR_TCIF) {
1416                 stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1417                 status &= ~STM32_MDMA_CISR_TCIF;
1418         }
1419
1420         if (status) {
1421                 stm32_mdma_set_bits(dmadev, reg, status);
1422                 dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
1423                 if (!(ccr & STM32_MDMA_CCR_EN))
1424                         dev_err(chan2dev(chan), "chan disabled by HW\n");
1425         }
1426
1427         spin_unlock(&chan->vchan.lock);
1428
1429         return IRQ_HANDLED;
1430 }
1431
/* dmaengine alloc_chan_resources callback: create the hwdesc pool, take a
 * runtime-PM reference and make sure the channel is disabled.
 */
static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	int ret;

	/* Device-managed pool: released automatically on driver detach */
	chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
					   c->device->dev,
					   sizeof(struct stm32_mdma_hwdesc),
					  __alignof__(struct stm32_mdma_hwdesc),
					   0);
	if (!chan->desc_pool) {
		dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
		return -ENOMEM;
	}

	ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
	if (ret < 0)
		return ret;

	/* Quiesce the channel; drop the PM reference if that fails */
	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		pm_runtime_put(dmadev->ddev.dev);

	return ret;
}
1458
/* dmaengine free_chan_resources callback: stop any running transfer, drop
 * the runtime-PM reference and tear down the descriptor pool.
 */
static void stm32_mdma_free_chan_resources(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	unsigned long flags;

	dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);

	/* Abort any transfer still running on the hardware */
	if (chan->busy) {
		spin_lock_irqsave(&chan->vchan.lock, flags);
		stm32_mdma_stop(chan);
		chan->desc = NULL;
		spin_unlock_irqrestore(&chan->vchan.lock, flags);
	}

	pm_runtime_put(dmadev->ddev.dev);
	vchan_free_chan_resources(to_virt_chan(c));
	dmam_pool_destroy(chan->desc_pool);
	chan->desc_pool = NULL;
}
1479
1480 static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
1481                                             struct of_dma *ofdma)
1482 {
1483         struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1484         struct stm32_mdma_chan *chan;
1485         struct dma_chan *c;
1486         struct stm32_mdma_chan_config config;
1487
1488         if (dma_spec->args_count < 5) {
1489                 dev_err(mdma2dev(dmadev), "Bad number of args\n");
1490                 return NULL;
1491         }
1492
1493         config.request = dma_spec->args[0];
1494         config.priority_level = dma_spec->args[1];
1495         config.transfer_config = dma_spec->args[2];
1496         config.mask_addr = dma_spec->args[3];
1497         config.mask_data = dma_spec->args[4];
1498
1499         if (config.request >= dmadev->nr_requests) {
1500                 dev_err(mdma2dev(dmadev), "Bad request line\n");
1501                 return NULL;
1502         }
1503
1504         if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
1505                 dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1506                 return NULL;
1507         }
1508
1509         c = dma_get_any_slave_channel(&dmadev->ddev);
1510         if (!c) {
1511                 dev_err(mdma2dev(dmadev), "No more channels available\n");
1512                 return NULL;
1513         }
1514
1515         chan = to_stm32_mdma_chan(c);
1516         chan->chan_config = config;
1517
1518         return c;
1519 }
1520
/* Device-tree compatible strings this driver binds to */
static const struct of_device_id stm32_mdma_of_match[] = {
	{ .compatible = "st,stm32h7-mdma", },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
1526
/*
 * Probe the MDMA controller: read channel/request counts from firmware
 * properties (falling back to driver maxima), map registers, enable the
 * clock, optionally pulse the reset line, register the dmaengine device
 * and the OF translation hook, then hand the (enabled) device over to
 * runtime PM.
 */
static int stm32_mdma_probe(struct platform_device *pdev)
{
	struct stm32_mdma_chan *chan;
	struct stm32_mdma_device *dmadev;
	struct dma_device *dd;
	struct device_node *of_node;
	struct resource *res;
	struct reset_control *rst;
	u32 nr_channels, nr_requests;
	int i, count, ret;

	of_node = pdev->dev.of_node;
	if (!of_node)
		return -ENODEV;

	/* Missing "dma-channels"/"dma-requests" are warnings, not errors */
	ret = device_property_read_u32(&pdev->dev, "dma-channels",
				       &nr_channels);
	if (ret) {
		nr_channels = STM32_MDMA_MAX_CHANNELS;
		dev_warn(&pdev->dev, "MDMA defaulting on %i channels\n",
			 nr_channels);
	}

	ret = device_property_read_u32(&pdev->dev, "dma-requests",
				       &nr_requests);
	if (ret) {
		nr_requests = STM32_MDMA_MAX_REQUESTS;
		dev_warn(&pdev->dev, "MDMA defaulting on %i request lines\n",
			 nr_requests);
	}

	/* "st,ahb-addr-masks" is optional; count < 0 means absent */
	count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
	if (count < 0)
		count = 0;

	/* The masks array is a flexible tail of the device structure */
	dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
			      GFP_KERNEL);
	if (!dmadev)
		return -ENOMEM;

	dmadev->nr_channels = nr_channels;
	dmadev->nr_requests = nr_requests;
	device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
				       dmadev->ahb_addr_masks,
				       count);
	dmadev->nr_ahb_addr_masks = count;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dmadev->base))
		return PTR_ERR(dmadev->base);

	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(dmadev->clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
				     "Missing clock controller\n");

	ret = clk_prepare_enable(dmadev->clk);
	if (ret < 0) {
		dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
		return ret;
	}

	/*
	 * Reset is optional: only a probe deferral aborts probing; any
	 * other failure to get the reset line is silently tolerated.
	 */
	rst = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_clk;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	dd = &dmadev->ddev;
	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dma_cap_set(DMA_MEMCPY, dd->cap_mask);
	dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
	dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
	dd->device_tx_status = stm32_mdma_tx_status;
	dd->device_issue_pending = stm32_mdma_issue_pending;
	dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
	dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
	dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
	dd->device_config = stm32_mdma_slave_config;
	dd->device_pause = stm32_mdma_pause;
	dd->device_resume = stm32_mdma_resume;
	dd->device_terminate_all = stm32_mdma_terminate_all;
	dd->device_synchronize = stm32_mdma_synchronize;
	dd->descriptor_reuse = true;

	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
		BIT(DMA_MEM_TO_MEM);
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dd->max_burst = STM32_MDMA_MAX_BURST;
	dd->dev = &pdev->dev;
	INIT_LIST_HEAD(&dd->channels);

	/* One virtual channel per hardware channel */
	for (i = 0; i < dmadev->nr_channels; i++) {
		chan = &dmadev->chan[i];
		chan->id = i;
		chan->vchan.desc_free = stm32_mdma_desc_free;
		vchan_init(&chan->vchan, dd);
	}

	dmadev->irq = platform_get_irq(pdev, 0);
	if (dmadev->irq < 0) {
		ret = dmadev->irq;
		goto err_clk;
	}

	ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
			       0, dev_name(&pdev->dev), dmadev);
	if (ret) {
		dev_err(&pdev->dev, "failed to request IRQ\n");
		goto err_clk;
	}

	/* Managed registration: undone automatically on driver detach */
	ret = dmaenginem_async_device_register(dd);
	if (ret)
		goto err_clk;

	ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
	if (ret < 0) {
		dev_err(&pdev->dev,
			"STM32 MDMA DMA OF registration failed %d\n", ret);
		goto err_clk;
	}

	platform_set_drvdata(pdev, dmadev);
	/*
	 * The clock is already on: mark the device active, then take and
	 * immediately drop a reference so runtime PM can gate the clock
	 * once the device goes idle.
	 */
	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);
	pm_runtime_get_noresume(&pdev->dev);
	pm_runtime_put(&pdev->dev);

	dev_info(&pdev->dev, "STM32 MDMA driver registered\n");

	return 0;

err_clk:
	clk_disable_unprepare(dmadev->clk);

	return ret;
}
1681
1682 #ifdef CONFIG_PM
/* Runtime PM: gate the controller clock while the device is idle */
static int stm32_mdma_runtime_suspend(struct device *dev)
{
	struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);

	clk_disable_unprepare(dmadev->clk);

	return 0;
}
1691
1692 static int stm32_mdma_runtime_resume(struct device *dev)
1693 {
1694         struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1695         int ret;
1696
1697         ret = clk_prepare_enable(dmadev->clk);
1698         if (ret) {
1699                 dev_err(dev, "failed to prepare_enable clock\n");
1700                 return ret;
1701         }
1702
1703         return 0;
1704 }
1705 #endif
1706
1707 #ifdef CONFIG_PM_SLEEP
1708 static int stm32_mdma_pm_suspend(struct device *dev)
1709 {
1710         struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1711         u32 ccr, id;
1712         int ret;
1713
1714         ret = pm_runtime_resume_and_get(dev);
1715         if (ret < 0)
1716                 return ret;
1717
1718         for (id = 0; id < dmadev->nr_channels; id++) {
1719                 ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
1720                 if (ccr & STM32_MDMA_CCR_EN) {
1721                         dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
1722                         return -EBUSY;
1723                 }
1724         }
1725
1726         pm_runtime_put_sync(dev);
1727
1728         pm_runtime_force_suspend(dev);
1729
1730         return 0;
1731 }
1732
/* System resume: undo pm_runtime_force_suspend() done at suspend time */
static int stm32_mdma_pm_resume(struct device *dev)
{
	return pm_runtime_force_resume(dev);
}
1737 #endif
1738
/* System-sleep and runtime PM callbacks (compiled out when PM is disabled) */
static const struct dev_pm_ops stm32_mdma_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_mdma_pm_suspend, stm32_mdma_pm_resume)
	SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
			   stm32_mdma_runtime_resume, NULL)
};
1744
/* No .remove: the driver registers at subsys_initcall and never unbinds */
static struct platform_driver stm32_mdma_driver = {
	.probe = stm32_mdma_probe,
	.driver = {
		.name = "stm32-mdma",
		.of_match_table = stm32_mdma_of_match,
		.pm = &stm32_mdma_pm_ops,
	},
};
1753
/* Module entry point: register the platform driver */
static int __init stm32_mdma_init(void)
{
	return platform_driver_register(&stm32_mdma_driver);
}
1758
/* Register early so the DMA engine is available to client drivers */
subsys_initcall(stm32_mdma_init);

MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
MODULE_LICENSE("GPL v2");