// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#define DRIVER_NAME             "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
#define FLG_GCM                 BIT(7)
#define FLG_CCM                 BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)
/* Bit [31..16] status */

/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C
#define CRYP_CSGCMCCM0R         0x00000050
#define CRYP_CSGCM0R            0x00000070

/* Register values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038
#define CR_AES_GCM              0x00080000
#define CR_AES_CCM              0x00080008
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000
#define CR_PH_INIT              0x00000000
#define CR_PH_HEADER            0x00010000
#define CR_PH_PAYLOAD           0x00020000
#define CR_PH_FINAL             0x00030000
#define CR_PH_MASK              0x00030000
#define CR_NBPBL_SHIFT          20

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)

/* Misc */
#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT            2
#define CRYP_AUTOSUSPEND_DELAY  50

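/*
 * Capability flags, set per compatible:
 * - swap_final: the final GCM phase expects the bit-length words byte-swapped
 *   before they are written to CRYP_DIN (see stm32_cryp_read_auth_tag()).
 * - padding_wa: the IP cannot pad a final partial block itself, so the
 *   CTR-based software workarounds below are required.
 */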
struct stm32_cryp_caps {
        bool                    swap_final;
        bool                    padding_wa;
};

struct stm32_cryp_ctx {
        struct crypto_engine_ctx enginectx;
        struct stm32_cryp       *cryp;
        int                     keylen;
        __be32                  key[AES_KEYSIZE_256 / sizeof(u32)];
        unsigned long           flags;
};

struct stm32_cryp_reqctx {
        unsigned long mode;
};

struct stm32_cryp {
        struct list_head        list;
        struct device           *dev;
        void __iomem            *regs;
        struct clk              *clk;
        unsigned long           flags;
        u32                     irq_status;
        const struct stm32_cryp_caps *caps;
        struct stm32_cryp_ctx   *ctx;

        struct crypto_engine    *engine;

        struct skcipher_request *req;
        struct aead_request     *areq;

        size_t                  authsize;
        size_t                  hw_blocksize;

        size_t                  payload_in;
        size_t                  header_in;
        size_t                  payload_out;

        struct scatterlist      *out_sg;

        struct scatter_walk     in_walk;
        struct scatter_walk     out_walk;

        __be32                  last_ctr[4];
        u32                     gcm_ctr;
};

struct stm32_cryp_list {
        struct list_head        dev_list;
        spinlock_t              lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
        .dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
        .lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
        return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
        return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
        writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
                        !(status & SR_BUSY), 10, 100000);
}

static inline void stm32_cryp_enable(struct stm32_cryp *cryp)
{
        writel_relaxed(readl_relaxed(cryp->regs + CRYP_CR) | CR_CRYPEN, cryp->regs + CRYP_CR);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
                        !(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
                        status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);

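/*
 * Reuse the CRYP instance cached in the tfm context when there is one,
 * otherwise bind to the first device registered in cryp_list.
 */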
static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
        struct stm32_cryp *tmp, *cryp = NULL;

        spin_lock_bh(&cryp_list.lock);
        if (!ctx->cryp) {
                list_for_each_entry(tmp, &cryp_list.dev_list, list) {
                        cryp = tmp;
                        break;
                }
                ctx->cryp = cryp;
        } else {
                cryp = ctx->cryp;
        }

        spin_unlock_bh(&cryp_list.lock);

        return cryp;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
{
        if (!iv)
                return;

        stm32_cryp_write(cryp, CRYP_IV0LR, be32_to_cpu(*iv++));
        stm32_cryp_write(cryp, CRYP_IV0RR, be32_to_cpu(*iv++));

        if (is_aes(cryp)) {
                stm32_cryp_write(cryp, CRYP_IV1LR, be32_to_cpu(*iv++));
                stm32_cryp_write(cryp, CRYP_IV1RR, be32_to_cpu(*iv++));
        }
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
        struct skcipher_request *req = cryp->req;
        __be32 *tmp = (void *)req->iv;

        if (!tmp)
                return;

        *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
        *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));

        if (is_aes(cryp)) {
                *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
                *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
        }
}

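/*
 * Load the key registers. DES only uses the K1 pair; for AES and TDES the
 * key words are written backwards from CRYP_K3RR, so that shorter keys land
 * in the highest-numbered registers, which is where the hardware expects
 * them.
 */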
static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
        unsigned int i;
        int r_id;

        if (is_des(c)) {
                stm32_cryp_write(c, CRYP_K1LR, be32_to_cpu(c->ctx->key[0]));
                stm32_cryp_write(c, CRYP_K1RR, be32_to_cpu(c->ctx->key[1]));
        } else {
                r_id = CRYP_K3RR;
                for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
                        stm32_cryp_write(c, r_id,
                                         be32_to_cpu(c->ctx->key[i - 1]));
        }
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
        if (is_aes(cryp) && is_ecb(cryp))
                return CR_AES_ECB;

        if (is_aes(cryp) && is_cbc(cryp))
                return CR_AES_CBC;

        if (is_aes(cryp) && is_ctr(cryp))
                return CR_AES_CTR;

        if (is_aes(cryp) && is_gcm(cryp))
                return CR_AES_GCM;

        if (is_aes(cryp) && is_ccm(cryp))
                return CR_AES_CCM;

        if (is_des(cryp) && is_ecb(cryp))
                return CR_DES_ECB;

        if (is_des(cryp) && is_cbc(cryp))
                return CR_DES_CBC;

        if (is_tdes(cryp) && is_ecb(cryp))
                return CR_TDES_ECB;

        if (is_tdes(cryp) && is_cbc(cryp))
                return CR_TDES_CBC;

        dev_err(cryp->dev, "Unknown mode\n");
        return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
        return is_encrypt(cryp) ? cryp->areq->cryptlen :
                                  cryp->areq->cryptlen - cryp->authsize;
}

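/*
 * GCM init phase: program the ICB as IV || counter, starting the counter at
 * GCM_CTR_INIT (2) since counter value 1 is reserved for the tag; the
 * hardware then computes the GHASH subkey (per the ST reference manual)
 * before the header/payload phases are entered.
 */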
static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
        int ret;
        __be32 iv[4];

        /* Phase 1 : init */
        memcpy(iv, cryp->areq->iv, 12);
        iv[3] = cpu_to_be32(GCM_CTR_INIT);
        cryp->gcm_ctr = GCM_CTR_INIT;
        stm32_cryp_hw_write_iv(cryp, iv);

        stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

        /* Wait for end of processing */
        ret = stm32_cryp_wait_enable(cryp);
        if (ret) {
                dev_err(cryp->dev, "Timeout (gcm init)\n");
                return ret;
        }

        /* Prepare next phase */
        if (cryp->areq->assoclen) {
                cfg |= CR_PH_HEADER;
                stm32_cryp_write(cryp, CRYP_CR, cfg);
        } else if (stm32_cryp_get_input_text_len(cryp)) {
                cfg |= CR_PH_PAYLOAD;
                stm32_cryp_write(cryp, CRYP_CR, cfg);
        }

        return 0;
}

static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
{
        u32 cfg;
        int err;

        /* Check if whole header written */
        if (!cryp->header_in) {
                /* Wait for completion */
                err = stm32_cryp_wait_busy(cryp);
                if (err) {
                        dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
                        stm32_cryp_write(cryp, CRYP_IMSCR, 0);
                        stm32_cryp_finish_req(cryp, err);
                        return;
                }

                if (stm32_cryp_get_input_text_len(cryp)) {
                        /* Phase 3 : payload */
                        cfg = stm32_cryp_read(cryp, CRYP_CR);
                        cfg &= ~CR_CRYPEN;
                        stm32_cryp_write(cryp, CRYP_CR, cfg);

                        cfg &= ~CR_PH_MASK;
                        cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
                        stm32_cryp_write(cryp, CRYP_CR, cfg);
                } else {
                        /*
                         * Phase 4 : tag.
                         * Nothing to read, nothing to write; the caller has
                         * to end the request.
                         */
                }
        }
}

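/*
 * First CCM header block: B1 starts with the encoded AAD length, which is
 * 2 bytes for lengths up to 2^16 - 2^8 (65280) and otherwise the 0xFFFE
 * marker followed by a 32-bit length, as defined by NIST SP 800-38C /
 * RFC 3610.
 */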
static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
{
        unsigned int i;
        size_t written;
        size_t len;
        u32 alen = cryp->areq->assoclen;
        u32 block[AES_BLOCK_32] = {0};
        u8 *b8 = (u8 *)block;

        if (alen <= 65280) {
                /* Write first u32 of B1 */
                b8[0] = (alen >> 8) & 0xFF;
                b8[1] = alen & 0xFF;
                len = 2;
        } else {
                /* Build the first two u32 of B1 */
                b8[0] = 0xFF;
                b8[1] = 0xFE;
                b8[2] = (alen & 0xFF000000) >> 24;
                b8[3] = (alen & 0x00FF0000) >> 16;
                b8[4] = (alen & 0x0000FF00) >> 8;
                b8[5] = alen & 0x000000FF;
                len = 6;
        }

        written = min_t(size_t, AES_BLOCK_SIZE - len, alen);

        scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
        for (i = 0; i < AES_BLOCK_32; i++)
                stm32_cryp_write(cryp, CRYP_DIN, block[i]);

        cryp->header_in -= written;

        stm32_crypt_gcmccm_end_header(cryp);
}

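/*
 * CCM init phase: iv[0] carries L' (length-field size minus one), so the
 * last iv[0] + 1 bytes form the counter, reset here to 1. B0 reuses the
 * flags byte and adds the encoded tag length ((t - 2) / 2 in bits 5..3),
 * the Adata bit (0x40) and the message length in its trailing bytes.
 */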
static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
        int ret;
        u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
        u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
        __be32 *bd;
        u32 *d;
        unsigned int i, textlen;

        /* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
        memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
        memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
        iv[AES_BLOCK_SIZE - 1] = 1;
        stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);

        /* Build B0 */
        memcpy(b0, iv, AES_BLOCK_SIZE);

        b0[0] |= (8 * ((cryp->authsize - 2) / 2));

        if (cryp->areq->assoclen)
                b0[0] |= 0x40;

        textlen = stm32_cryp_get_input_text_len(cryp);

        b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
        b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

        /* Enable HW */
        stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

        /* Write B0 */
        d = (u32 *)b0;
        bd = (__be32 *)b0;

        for (i = 0; i < AES_BLOCK_32; i++) {
                u32 xd = d[i];

                if (!cryp->caps->padding_wa)
                        xd = be32_to_cpu(bd[i]);
                stm32_cryp_write(cryp, CRYP_DIN, xd);
        }

        /* Wait for end of processing */
        ret = stm32_cryp_wait_enable(cryp);
        if (ret) {
                dev_err(cryp->dev, "Timeout (ccm init)\n");
                return ret;
        }

        /* Prepare next phase */
        if (cryp->areq->assoclen) {
                cfg |= CR_PH_HEADER | CR_CRYPEN;
                stm32_cryp_write(cryp, CRYP_CR, cfg);

                /* Write first (special) block (may move to next phase [payload]) */
                stm32_cryp_write_ccm_first_header(cryp);
        } else if (stm32_cryp_get_input_text_len(cryp)) {
                cfg |= CR_PH_PAYLOAD;
                stm32_cryp_write(cryp, CRYP_CR, cfg);
        }

        return 0;
}

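/*
 * Program the hardware for the current request: datatype, key size, algo,
 * direction and IV. For AES ECB/CBC decryption the key preparation mode
 * (CR_AES_KP) is run first so the hardware derives the decryption key
 * schedule from the encryption key.
 */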
static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
        int ret;
        u32 cfg, hw_mode;

        pm_runtime_get_sync(cryp->dev);

        /* Disable interrupt */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);

        /* Set configuration */
        cfg = CR_DATA8 | CR_FFLUSH;

        switch (cryp->ctx->keylen) {
        case AES_KEYSIZE_128:
                cfg |= CR_KEY128;
                break;

        case AES_KEYSIZE_192:
                cfg |= CR_KEY192;
                break;

        default:
        case AES_KEYSIZE_256:
                cfg |= CR_KEY256;
                break;
        }

        hw_mode = stm32_cryp_get_hw_mode(cryp);
        if (hw_mode == CR_AES_UNKNOWN)
                return -EINVAL;

        /* AES ECB/CBC decrypt: run key preparation first */
        if (is_decrypt(cryp) &&
            ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
                /* Configure in key preparation mode */
                stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP);

                /* Set key only after full configuration done */
                stm32_cryp_hw_write_key(cryp);

                /* Start prepare key */
                stm32_cryp_enable(cryp);
                /* Wait for end of processing */
                ret = stm32_cryp_wait_busy(cryp);
                if (ret) {
                        dev_err(cryp->dev, "Timeout (key preparation)\n");
                        return ret;
                }

                cfg |= hw_mode | CR_DEC_NOT_ENC;

                /* Apply updated config (Decrypt + algo) and flush */
                stm32_cryp_write(cryp, CRYP_CR, cfg);
        } else {
                cfg |= hw_mode;
                if (is_decrypt(cryp))
                        cfg |= CR_DEC_NOT_ENC;

                /* Apply config and flush */
                stm32_cryp_write(cryp, CRYP_CR, cfg);

                /* Set key only after configuration done */
                stm32_cryp_hw_write_key(cryp);
        }

        switch (hw_mode) {
        case CR_AES_GCM:
        case CR_AES_CCM:
                /* Phase 1 : init */
                if (hw_mode == CR_AES_CCM)
                        ret = stm32_cryp_ccm_init(cryp, cfg);
                else
                        ret = stm32_cryp_gcm_init(cryp, cfg);

                if (ret)
                        return ret;

                break;

        case CR_DES_CBC:
        case CR_TDES_CBC:
        case CR_AES_CBC:
        case CR_AES_CTR:
                stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
                break;

        default:
                break;
        }

        /* Enable now */
        stm32_cryp_enable(cryp);

        return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
        if (!err && (is_gcm(cryp) || is_ccm(cryp)))
                /* Phase 4 : output tag */
                err = stm32_cryp_read_auth_tag(cryp);

        if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
                stm32_cryp_get_iv(cryp);

        pm_runtime_mark_last_busy(cryp->dev);
        pm_runtime_put_autosuspend(cryp->dev);

        if (is_gcm(cryp) || is_ccm(cryp))
                crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
        else
                crypto_finalize_skcipher_request(cryp->engine, cryp->req,
                                                   err);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
        /* Enable interrupt and let the IRQ handler do everything */
        stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

        return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
                                         void *areq);

static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
{
        struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

        crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

        ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
        ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
        ctx->enginectx.op.unprepare_request = NULL;
        return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
                                       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

        tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

        ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
        ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
        ctx->enginectx.op.unprepare_request = NULL;

        return 0;
}

static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
{
        struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
                        crypto_skcipher_reqtfm(req));
        struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
        struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

        if (!cryp)
                return -ENODEV;

        rctx->mode = mode;

        return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
        struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

        if (!cryp)
                return -ENODEV;

        rctx->mode = mode;

        return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;
        else
                return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        return verify_skcipher_des_key(tfm, key) ?:
               stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                  unsigned int keylen)
{
        return verify_skcipher_des3_key(tfm, key) ?:
               stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
                                      unsigned int keylen)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
                                          unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 8:
        case 12:
        case 13:
        case 14:
        case 15:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
                                          unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 6:
        case 8:
        case 10:
        case 12:
        case 14:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
{
        if (req->cryptlen % AES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
{
        if (req->cryptlen % AES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
{
        if (req->cryptlen % AES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
{
        if (req->cryptlen % AES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
{
        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
{
        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
        /* 2 <= L <= 8, so 1 <= L' <= 7. */
        if (iv[0] < 1 || iv[0] > 7)
                return -EINVAL;

        return 0;
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
        int err;

        err = crypto_ccm_check_iv(req->iv);
        if (err)
                return err;

        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
        int err;

        err = crypto_ccm_check_iv(req->iv);
        if (err)
                return err;

        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
{
        if (req->cryptlen % DES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
{
        if (req->cryptlen % DES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
{
        if (req->cryptlen % DES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
{
        if (req->cryptlen % DES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
{
        if (req->cryptlen % DES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
{
        if (req->cryptlen % DES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
{
        if (req->cryptlen % DES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
{
        if (req->cryptlen % DES_BLOCK_SIZE)
                return -EINVAL;

        if (req->cryptlen == 0)
                return 0;

        return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

static int stm32_cryp_prepare_req(struct skcipher_request *req,
                                  struct aead_request *areq)
{
        struct stm32_cryp_ctx *ctx;
        struct stm32_cryp *cryp;
        struct stm32_cryp_reqctx *rctx;
        struct scatterlist *in_sg;
        int ret;

        if (!req && !areq)
                return -EINVAL;

        ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
                    crypto_aead_ctx(crypto_aead_reqtfm(areq));

        cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
        rctx->mode &= FLG_MODE_MASK;

        ctx->cryp = cryp;

        cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
        cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
        cryp->ctx = ctx;

        if (req) {
                cryp->req = req;
                cryp->areq = NULL;
                cryp->header_in = 0;
                cryp->payload_in = req->cryptlen;
                cryp->payload_out = req->cryptlen;
                cryp->authsize = 0;
        } else {
                /*
                 * Length of input and output data:
                 * Encryption case:
                 *  INPUT  = AssocData   ||     PlainText
                 *          <- assoclen ->  <- cryptlen ->
                 *
                 *  OUTPUT = AssocData    ||   CipherText   ||      AuthTag
                 *          <- assoclen ->  <-- cryptlen -->  <- authsize ->
                 *
                 * Decryption case:
                 *  INPUT  =  AssocData     ||    CipherText   ||      AuthTag
                 *          <- assoclen --->  <---------- cryptlen ---------->
                 *
                 *  OUTPUT = AssocData    ||               PlainText
                 *          <- assoclen ->  <- cryptlen - authsize ->
                 */
                cryp->areq = areq;
                cryp->req = NULL;
                cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
                if (is_encrypt(cryp)) {
                        cryp->payload_in = areq->cryptlen;
                        cryp->header_in = areq->assoclen;
                        cryp->payload_out = areq->cryptlen;
                } else {
                        cryp->payload_in = areq->cryptlen - cryp->authsize;
                        cryp->header_in = areq->assoclen;
                        cryp->payload_out = cryp->payload_in;
                }
        }

        in_sg = req ? req->src : areq->src;
        scatterwalk_start(&cryp->in_walk, in_sg);

        cryp->out_sg = req ? req->dst : areq->dst;
        scatterwalk_start(&cryp->out_walk, cryp->out_sg);

        if (is_gcm(cryp) || is_ccm(cryp)) {
                /* In output, jump after assoc data */
                scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
        }

        if (is_ctr(cryp))
                memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));

        ret = stm32_cryp_hw_init(cryp);
        return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
                                         void *areq)
{
        struct skcipher_request *req = container_of(areq,
                                                      struct skcipher_request,
                                                      base);

        return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
        struct skcipher_request *req = container_of(areq,
                                                      struct skcipher_request,
                                                      base);
        struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
                        crypto_skcipher_reqtfm(req));
        struct stm32_cryp *cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
        struct aead_request *req = container_of(areq, struct aead_request,
                                                base);

        return stm32_cryp_prepare_req(NULL, req);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
        struct aead_request *req = container_of(areq, struct aead_request,
                                                base);
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct stm32_cryp *cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        if (unlikely(!cryp->payload_in && !cryp->header_in)) {
                /* No input data to process: get tag and finish */
                stm32_cryp_finish_req(cryp, 0);
                return 0;
        }

        return stm32_cryp_cpu_start(cryp);
}

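/*
 * Final (tag) phase. GCM expects len(AAD) || len(ciphertext) as two 64-bit
 * big-endian bit counts; CCM expects CTR0, i.e. the IV with its counter
 * bytes cleared. On encryption the computed tag is appended to the output;
 * on decryption it is compared with the tag found in the input.
 */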
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
        u32 cfg, size_bit;
        unsigned int i;
        int ret = 0;

        /* Update Config */
        cfg = stm32_cryp_read(cryp, CRYP_CR);

        cfg &= ~CR_PH_MASK;
        cfg |= CR_PH_FINAL;
        cfg &= ~CR_DEC_NOT_ENC;
        cfg |= CR_CRYPEN;

        stm32_cryp_write(cryp, CRYP_CR, cfg);

        if (is_gcm(cryp)) {
                /* GCM: write aad and payload size (in bits) */
                size_bit = cryp->areq->assoclen * 8;
                if (cryp->caps->swap_final)
                        size_bit = (__force u32)cpu_to_be32(size_bit);

                stm32_cryp_write(cryp, CRYP_DIN, 0);
                stm32_cryp_write(cryp, CRYP_DIN, size_bit);

                size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
                                cryp->areq->cryptlen - cryp->authsize;
                size_bit *= 8;
                if (cryp->caps->swap_final)
                        size_bit = (__force u32)cpu_to_be32(size_bit);

                stm32_cryp_write(cryp, CRYP_DIN, 0);
                stm32_cryp_write(cryp, CRYP_DIN, size_bit);
        } else {
                /* CCM: write CTR0 */
                u32 iv32[AES_BLOCK_32];
                u8 *iv = (u8 *)iv32;
                __be32 *biv = (__be32 *)iv32;

                memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
                memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

                for (i = 0; i < AES_BLOCK_32; i++) {
                        u32 xiv = iv32[i];

                        if (!cryp->caps->padding_wa)
                                xiv = be32_to_cpu(biv[i]);
                        stm32_cryp_write(cryp, CRYP_DIN, xiv);
                }
        }

        /* Wait for output data */
        ret = stm32_cryp_wait_output(cryp);
        if (ret) {
                dev_err(cryp->dev, "Timeout (read tag)\n");
                return ret;
        }

        if (is_encrypt(cryp)) {
                u32 out_tag[AES_BLOCK_32];

                /* Get and write tag */
                for (i = 0; i < AES_BLOCK_32; i++)
                        out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

                scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
        } else {
                /* Get and check tag */
                u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

                scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);

                for (i = 0; i < AES_BLOCK_32; i++)
                        out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

                if (crypto_memneq(in_tag, out_tag, cryp->authsize))
                        ret = -EBADMSG;
        }

        /* Disable cryp */
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        return ret;
}

static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
        u32 cr;

        if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
                /*
                 * In this case, we need to manually increment the CTR
                 * counter, as the HW doesn't handle the u32 carry.
                 */
                crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));

                cr = stm32_cryp_read(cryp, CRYP_CR);
                stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

                stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);

                stm32_cryp_write(cryp, CRYP_CR, cr);
        }

        /* The IV registers are BE */
        cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
        cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
        cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
        cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
}

static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
        unsigned int i;
        u32 block[AES_BLOCK_32];

        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
                block[i] = stm32_cryp_read(cryp, CRYP_DOUT);

        scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
                                                             cryp->payload_out), 1);
        cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
                                   cryp->payload_out);
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
        unsigned int i;
        u32 block[AES_BLOCK_32] = {0};

        scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
                                                            cryp->payload_in), 0);
        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
                stm32_cryp_write(cryp, CRYP_DIN, block[i]);

        cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
}

1273 {
1274         int err;
1275         u32 cfg, block[AES_BLOCK_32] = {0};
1276         unsigned int i;
1277
1278         /* 'Special workaround' procedure described in the datasheet */
1279
1280         /* a) disable ip */
1281         stm32_cryp_write(cryp, CRYP_IMSCR, 0);
1282         cfg = stm32_cryp_read(cryp, CRYP_CR);
1283         cfg &= ~CR_CRYPEN;
1284         stm32_cryp_write(cryp, CRYP_CR, cfg);
1285
1286         /* b) Update IV1R */
1287         stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);
1288
1289         /* c) change mode to CTR */
1290         cfg &= ~CR_ALGO_MASK;
1291         cfg |= CR_AES_CTR;
1292         stm32_cryp_write(cryp, CRYP_CR, cfg);
1293
1294         /* a) enable IP */
1295         cfg |= CR_CRYPEN;
1296         stm32_cryp_write(cryp, CRYP_CR, cfg);
1297
1298         /* b) pad and write the last block */
1299         stm32_cryp_irq_write_block(cryp);
1300         /* wait end of process */
1301         err = stm32_cryp_wait_output(cryp);
1302         if (err) {
1303                 dev_err(cryp->dev, "Timeout (write gcm last data)\n");
1304                 return stm32_cryp_finish_req(cryp, err);
1305         }
1306
1307         /* c) get and store encrypted data */
1308         /*
1309          * Same code as stm32_cryp_irq_read_data(), but we want to store
1310          * block value
1311          */
1312         for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
1313                 block[i] = stm32_cryp_read(cryp, CRYP_DOUT);
1314
1315         scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1316                                                              cryp->payload_out), 1);
1317         cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
1318                                    cryp->payload_out);
1319
1320         /* d) change mode back to AES GCM */
1321         cfg &= ~CR_ALGO_MASK;
1322         cfg |= CR_AES_GCM;
1323         stm32_cryp_write(cryp, CRYP_CR, cfg);
1324
1325         /* e) change phase to Final */
1326         cfg &= ~CR_PH_MASK;
1327         cfg |= CR_PH_FINAL;
1328         stm32_cryp_write(cryp, CRYP_CR, cfg);
1329
1330         /* f) write padded data */
1331         for (i = 0; i < AES_BLOCK_32; i++)
1332                 stm32_cryp_write(cryp, CRYP_DIN, block[i]);
1333
1334         /* g) Empty fifo out */
1335         err = stm32_cryp_wait_output(cryp);
1336         if (err) {
1337                 dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
1338                 return stm32_cryp_finish_req(cryp, err);
1339         }
1340
1341         for (i = 0; i < AES_BLOCK_32; i++)
1342                 stm32_cryp_read(cryp, CRYP_DOUT);
1343
1344         /* h) run the he normal Final phase */
1345         stm32_cryp_finish_req(cryp, 0);
1346 }
1347
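/*
 * On IPs that support it, the NPBLB field tells the hardware how many
 * padding bytes the last (partial) block contains, so it can discard them
 * from the tag computation; no software workaround is needed then.
 */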
static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
        u32 cfg;

        /* Disable ip, set NPBLB and re-enable ip */
        cfg = stm32_cryp_read(cryp, CRYP_CR);
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
        cfg |= CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);
}

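/*
 * AES-CCM decryption of a final partial block on IPs that cannot pad by
 * themselves: the 'special workaround' from the ST reference manual,
 * mirroring the GCM case above. The block is decrypted in plain CTR mode,
 * then the saved CRYP_CSGCMCCMxR context snapshots are XORed into the
 * padded result before it is replayed in header phase to fix up the
 * CBC-MAC.
 */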
static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
        int err = 0;
        u32 cfg, iv1tmp;
        u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
        u32 block[AES_BLOCK_32] = {0};
        unsigned int i;

        /* 'Special workaround' procedure described in the datasheet */

        /* a) disable ip */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);

        cfg = stm32_cryp_read(cryp, CRYP_CR);
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) get IV1 from CRYP_CSGCMCCM7 */
        iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

        /* c) Load CRYP_CSGCMCCMxR */
        for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
                cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

        /* d) Write IV1R */
        stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

        /* e) change mode to CTR */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_CTR;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* a) enable IP */
        cfg |= CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) pad and write the last block */
        stm32_cryp_irq_write_block(cryp);
        /* wait end of process */
        err = stm32_cryp_wait_output(cryp);
        if (err) {
                dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
                return stm32_cryp_finish_req(cryp, err);
        }

        /* c) get and store decrypted data */
        /*
         * Same code as stm32_cryp_irq_read_data(), but we want to store
         * the block value
         */
        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++)
                block[i] = stm32_cryp_read(cryp, CRYP_DOUT);

        scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
                                                             cryp->payload_out), 1);
        cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);

        /* d) Load again CRYP_CSGCMCCMxR */
        for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
                cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

        /* e) change mode back to AES CCM */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_CCM;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* f) change phase to header */
        cfg &= ~CR_PH_MASK;
        cfg |= CR_PH_HEADER;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* g) XOR and write padded data */
        for (i = 0; i < ARRAY_SIZE(block); i++) {
                block[i] ^= cstmp1[i];
                block[i] ^= cstmp2[i];
                stm32_cryp_write(cryp, CRYP_DIN, block[i]);
        }

        /* h) wait for completion */
        err = stm32_cryp_wait_busy(cryp);
        if (err)
                dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

        /* i) run the normal Final phase */
        stm32_cryp_finish_req(cryp, err);
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
        if (unlikely(!cryp->payload_in)) {
                dev_warn(cryp->dev, "No more data to process\n");
                return;
        }

        if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
                     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
                     is_encrypt(cryp))) {
                /* Padding for AES GCM encryption */
                if (cryp->caps->padding_wa) {
                        /* Special case 1 */
                        stm32_cryp_irq_write_gcm_padded_data(cryp);
                        return;
                }

                /* Setting padding bytes (NPBLB) */
                stm32_cryp_irq_set_npblb(cryp);
        }

        if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
                     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
                     is_decrypt(cryp))) {
                /* Padding for AES CCM decryption */
                if (cryp->caps->padding_wa) {
                        /* Special case 2 */
                        stm32_cryp_irq_write_ccm_padded_data(cryp);
                        return;
                }

                /* Setting padding bytes (NPBLB) */
                stm32_cryp_irq_set_npblb(cryp);
        }

        if (is_aes(cryp) && is_ctr(cryp))
                stm32_cryp_check_ctr_counter(cryp);

        stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
{
        unsigned int i;
        u32 block[AES_BLOCK_32] = {0};
        size_t written;

        written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);

        scatterwalk_copychunks(block, &cryp->in_walk, written, 0);
        for (i = 0; i < AES_BLOCK_32; i++)
                stm32_cryp_write(cryp, CRYP_DIN, block[i]);

        cryp->header_in -= written;

        stm32_crypt_gcmccm_end_header(cryp);
}

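/*
 * Threaded interrupt handler: the hard IRQ handler (stm32_cryp_irq()) only
 * latches CRYP_MISR. This thread drains the output FIFO, feeds the input
 * FIFO (header or payload for GCM/CCM), masks interrupts that are no longer
 * needed and finalizes the request once all data has been transferred.
 */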
1507 static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
1508 {
1509         struct stm32_cryp *cryp = arg;
1510         u32 ph;
1511         u32 it_mask = stm32_cryp_read(cryp, CRYP_IMSCR);
1512
1513         if (cryp->irq_status & MISR_OUT)
1514                 /* Output FIFO IRQ: read data */
1515                 stm32_cryp_irq_read_data(cryp);
1516
1517         if (cryp->irq_status & MISR_IN) {
1518                 if (is_gcm(cryp) || is_ccm(cryp)) {
1519                         ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
1520                         if (unlikely(ph == CR_PH_HEADER))
1521                                 /* Write Header */
1522                                 stm32_cryp_irq_write_gcmccm_header(cryp);
1523                         else
1524                                 /* Input FIFO IRQ: write data */
1525                                 stm32_cryp_irq_write_data(cryp);
1526                         if (is_gcm(cryp))
1527                                 cryp->gcm_ctr++;
1528                 } else {
1529                         /* Input FIFO IRQ: write data */
1530                         stm32_cryp_irq_write_data(cryp);
1531                 }
1532         }
1533
1534         /* Mask useless interrupts */
1535         if (!cryp->payload_in && !cryp->header_in)
1536                 it_mask &= ~IMSCR_IN;
1537         if (!cryp->payload_out)
1538                 it_mask &= ~IMSCR_OUT;
1539         stm32_cryp_write(cryp, CRYP_IMSCR, it_mask);
1540
1541         if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out)
1542                 stm32_cryp_finish_req(cryp, 0);
1543
1544         return IRQ_HANDLED;
1545 }
1546
1547 static irqreturn_t stm32_cryp_irq(int irq, void *arg)
1548 {
1549         struct stm32_cryp *cryp = arg;
1550
1551         cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);
1552
1553         return IRQ_WAKE_THREAD;
1554 }
1555
1556 static struct skcipher_alg crypto_algs[] = {
1557 {
1558         .base.cra_name          = "ecb(aes)",
1559         .base.cra_driver_name   = "stm32-ecb-aes",
1560         .base.cra_priority      = 200,
1561         .base.cra_flags         = CRYPTO_ALG_ASYNC,
1562         .base.cra_blocksize     = AES_BLOCK_SIZE,
1563         .base.cra_ctxsize       = sizeof(struct stm32_cryp_ctx),
1564         .base.cra_alignmask     = 0,
1565         .base.cra_module        = THIS_MODULE,
1566
1567         .init                   = stm32_cryp_init_tfm,
1568         .min_keysize            = AES_MIN_KEY_SIZE,
1569         .max_keysize            = AES_MAX_KEY_SIZE,
1570         .setkey                 = stm32_cryp_aes_setkey,
1571         .encrypt                = stm32_cryp_aes_ecb_encrypt,
1572         .decrypt                = stm32_cryp_aes_ecb_decrypt,
1573 },
1574 {
1575         .base.cra_name          = "cbc(aes)",
1576         .base.cra_driver_name   = "stm32-cbc-aes",
1577         .base.cra_priority      = 200,
1578         .base.cra_flags         = CRYPTO_ALG_ASYNC,
1579         .base.cra_blocksize     = AES_BLOCK_SIZE,
1580         .base.cra_ctxsize       = sizeof(struct stm32_cryp_ctx),
1581         .base.cra_alignmask     = 0,
1582         .base.cra_module        = THIS_MODULE,
1583
1584         .init                   = stm32_cryp_init_tfm,
1585         .min_keysize            = AES_MIN_KEY_SIZE,
1586         .max_keysize            = AES_MAX_KEY_SIZE,
1587         .ivsize                 = AES_BLOCK_SIZE,
1588         .setkey                 = stm32_cryp_aes_setkey,
1589         .encrypt                = stm32_cryp_aes_cbc_encrypt,
1590         .decrypt                = stm32_cryp_aes_cbc_decrypt,
1591 },
1592 {
1593         .base.cra_name          = "ctr(aes)",
1594         .base.cra_driver_name   = "stm32-ctr-aes",
1595         .base.cra_priority      = 200,
1596         .base.cra_flags         = CRYPTO_ALG_ASYNC,
1597         .base.cra_blocksize     = 1,
1598         .base.cra_ctxsize       = sizeof(struct stm32_cryp_ctx),
1599         .base.cra_alignmask     = 0,
1600         .base.cra_module        = THIS_MODULE,
1601
1602         .init                   = stm32_cryp_init_tfm,
1603         .min_keysize            = AES_MIN_KEY_SIZE,
1604         .max_keysize            = AES_MAX_KEY_SIZE,
1605         .ivsize                 = AES_BLOCK_SIZE,
1606         .setkey                 = stm32_cryp_aes_setkey,
1607         .encrypt                = stm32_cryp_aes_ctr_encrypt,
1608         .decrypt                = stm32_cryp_aes_ctr_decrypt,
1609 },
{
	.base.cra_name		= "ecb(des)",
	.base.cra_driver_name	= "stm32-ecb-des",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES_KEY_SIZE,
	.max_keysize		= DES_KEY_SIZE,
	.setkey			= stm32_cryp_des_setkey,
	.encrypt		= stm32_cryp_des_ecb_encrypt,
	.decrypt		= stm32_cryp_des_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(des)",
	.base.cra_driver_name	= "stm32-cbc-des",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES_KEY_SIZE,
	.max_keysize		= DES_KEY_SIZE,
	.ivsize			= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_des_setkey,
	.encrypt		= stm32_cryp_des_cbc_encrypt,
	.decrypt		= stm32_cryp_des_cbc_decrypt,
},
{
	.base.cra_name		= "ecb(des3_ede)",
	.base.cra_driver_name	= "stm32-ecb-des3",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES3_EDE_KEY_SIZE,
	.max_keysize		= DES3_EDE_KEY_SIZE,
	.setkey			= stm32_cryp_tdes_setkey,
	.encrypt		= stm32_cryp_tdes_ecb_encrypt,
	.decrypt		= stm32_cryp_tdes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(des3_ede)",
	.base.cra_driver_name	= "stm32-cbc-des3",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES3_EDE_KEY_SIZE,
	.max_keysize		= DES3_EDE_KEY_SIZE,
	.ivsize			= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_tdes_setkey,
	.encrypt		= stm32_cryp_tdes_cbc_encrypt,
	.decrypt		= stm32_cryp_tdes_cbc_decrypt,
},
};

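/* AEAD (authenticated encryption) modes: AES-GCM and AES-CCM */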
static struct aead_alg aead_algs[] = {
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
	.encrypt	= stm32_cryp_aes_gcm_encrypt,
	.decrypt	= stm32_cryp_aes_gcm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= 12,	/* 96-bit GCM nonce */
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "stm32-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
	.encrypt	= stm32_cryp_aes_ccm_encrypt,
	.decrypt	= stm32_cryp_aes_ccm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "stm32-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0,
		.cra_module		= THIS_MODULE,
	},
},
};

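/*
 * Per-SoC capabilities: the F7 variant needs the final-block swap and the
 * padding workarounds, the MP1 variant does not.
 */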
static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

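/*
 * Probe: map the registers, request the threaded IRQ, enable the clock and
 * runtime PM, optionally pulse the reset line, then start one crypto engine
 * queue and register every algorithm.
 */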
static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk))
		return dev_err_probe(dev, PTR_ERR(cryp->clk),
				     "Could not get clock\n");

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

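	/*
	 * The reset line is treated as optional: only a probe deferral from
	 * the reset controller aborts the probe, otherwise pulse the reset
	 * so the block starts from a known state.
	 */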
	rst = devm_reset_control_get(dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_rst;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

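	/* Unwind in the reverse order of initialization */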
err_aead_algs:
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);
err_rst:
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	/* Make sure the block is powered before tearing everything down */
	ret = pm_runtime_resume_and_get(cryp->dev);
	if (ret < 0)
		return ret;

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

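/* Runtime PM just gates the peripheral clock while the device is idle */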
#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.pm             = &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");