/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 * License terms:  GNU General Public License (GPL), version 2
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>

#define DRIVER_NAME             "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bits [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
#define FLG_GCM                 BIT(7)
#define FLG_CCM                 BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)
/* Bits [31..16] status */
#define FLG_CCM_PADDED_WA       BIT(16)

/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C
#define CRYP_CSGCMCCM0R         0x00000050
#define CRYP_CSGCM0R            0x00000070

/* Registers values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038
#define CR_AES_GCM              0x00080000
#define CR_AES_CCM              0x00080008
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000
#define CR_PH_INIT              0x00000000
#define CR_PH_HEADER            0x00010000
#define CR_PH_PAYLOAD           0x00020000
#define CR_PH_FINAL             0x00030000
#define CR_PH_MASK              0x00030000
#define CR_NPBLB_SHIFT          20

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)

/* Misc */
#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT            2
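/* Bytes of the current scatterlist entry already consumed by the walk */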
#define _walked_in              (cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out             (cryp->out_walk.offset - cryp->out_sg->offset)
#define CRYP_AUTOSUSPEND_DELAY  50

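/*
 * Hardware capability flags; semantics inferred from their use below:
 * - swap_final: the final GCM size words must be written byte-swapped
 * - padding_wa: the IP cannot pad a non-block-aligned GCM/CCM payload
 *   itself, so the software workaround paths are required
 */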
struct stm32_cryp_caps {
        bool                    swap_final;
        bool                    padding_wa;
};

struct stm32_cryp_ctx {
        struct crypto_engine_ctx enginectx;
        struct stm32_cryp       *cryp;
        int                     keylen;
        u32                     key[AES_KEYSIZE_256 / sizeof(u32)];
        unsigned long           flags;
};

struct stm32_cryp_reqctx {
        unsigned long mode;
};

struct stm32_cryp {
        struct list_head        list;
        struct device           *dev;
        void __iomem            *regs;
        struct clk              *clk;
        unsigned long           flags;
        u32                     irq_status;
        const struct stm32_cryp_caps *caps;
        struct stm32_cryp_ctx   *ctx;

        struct crypto_engine    *engine;

        struct mutex            lock; /* protects req / areq */
        struct ablkcipher_request *req;
        struct aead_request     *areq;

        size_t                  authsize;
        size_t                  hw_blocksize;

        size_t                  total_in;
        size_t                  total_in_save;
        size_t                  total_out;
        size_t                  total_out_save;

        struct scatterlist      *in_sg;
        struct scatterlist      *out_sg;
        struct scatterlist      *out_sg_save;

        struct scatterlist      in_sgl;
        struct scatterlist      out_sgl;
        bool                    sgs_copied;

        int                     in_sg_len;
        int                     out_sg_len;

        struct scatter_walk     in_walk;
        struct scatter_walk     out_walk;

        u32                     last_ctr[4];
        u32                     gcm_ctr;
};

struct stm32_cryp_list {
        struct list_head        dev_list;
        spinlock_t              lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
        .dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
        .lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
        return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
        return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
        writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
                        !(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
                        !(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
                        status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
        struct stm32_cryp *tmp, *cryp = NULL;

        spin_lock_bh(&cryp_list.lock);
        if (!ctx->cryp) {
                list_for_each_entry(tmp, &cryp_list.dev_list, list) {
                        cryp = tmp;
                        break;
                }
                ctx->cryp = cryp;
        } else {
                cryp = ctx->cryp;
        }

        spin_unlock_bh(&cryp_list.lock);

        return cryp;
}

static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
                                    size_t align)
{
        int len = 0;

        if (!total)
                return 0;

        if (!IS_ALIGNED(total, align))
                return -EINVAL;

        while (sg) {
                if (!IS_ALIGNED(sg->offset, sizeof(u32)))
                        return -EINVAL;

                if (!IS_ALIGNED(sg->length, align))
                        return -EINVAL;

                len += sg->length;
                sg = sg_next(sg);
        }

        if (len != total)
                return -EINVAL;

        return 0;
}

static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
        int ret;

        ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
                                       cryp->hw_blocksize);
        if (ret)
                return ret;

        ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
                                       cryp->hw_blocksize);

        return ret;
}

static void sg_copy_buf(void *buf, struct scatterlist *sg,
                        unsigned int start, unsigned int nbytes, int out)
{
        struct scatter_walk walk;

        if (!nbytes)
                return;

        scatterwalk_start(&walk, sg);
        scatterwalk_advance(&walk, start);
        scatterwalk_copychunks(buf, &walk, nbytes, out);
        scatterwalk_done(&walk, out, 0);
}

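/*
 * The CRYP FIFOs consume full 32-bit words of whole blocks: when the
 * request scatterlists are not suitably aligned, bounce the data through
 * freshly allocated contiguous buffers instead.
 */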
static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
        void *buf_in, *buf_out;
        int pages, total_in, total_out;

        if (!stm32_cryp_check_io_aligned(cryp)) {
                cryp->sgs_copied = 0;
                return 0;
        }

        total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
        pages = total_in ? get_order(total_in) : 1;
        buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);

        total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
        pages = total_out ? get_order(total_out) : 1;
        buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);

        if (!buf_in || !buf_out) {
                dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
                /* free_pages() safely ignores a zero (NULL) address */
                free_pages((unsigned long)buf_in,
                           total_in ? get_order(total_in) : 1);
                free_pages((unsigned long)buf_out,
                           total_out ? get_order(total_out) : 1);
                cryp->sgs_copied = 0;
                return -EFAULT;
        }

        sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

        sg_init_one(&cryp->in_sgl, buf_in, total_in);
        cryp->in_sg = &cryp->in_sgl;
        cryp->in_sg_len = 1;

        sg_init_one(&cryp->out_sgl, buf_out, total_out);
        cryp->out_sg_save = cryp->out_sg;
        cryp->out_sg = &cryp->out_sgl;
        cryp->out_sg_len = 1;

        cryp->sgs_copied = 1;

        return 0;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
        if (!iv)
                return;

        stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
        stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

        if (is_aes(cryp)) {
                stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
                stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
        }
}

static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
        unsigned int i;
        int r_id;

        if (is_des(c)) {
                stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
                stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
        } else {
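                /* AES/TDES keys are right-aligned: write from K3RR downwards */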
                r_id = CRYP_K3RR;
                for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
                        stm32_cryp_write(c, r_id,
                                         cpu_to_be32(c->ctx->key[i - 1]));
        }
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
        if (is_aes(cryp) && is_ecb(cryp))
                return CR_AES_ECB;

        if (is_aes(cryp) && is_cbc(cryp))
                return CR_AES_CBC;

        if (is_aes(cryp) && is_ctr(cryp))
                return CR_AES_CTR;

        if (is_aes(cryp) && is_gcm(cryp))
                return CR_AES_GCM;

        if (is_aes(cryp) && is_ccm(cryp))
                return CR_AES_CCM;

        if (is_des(cryp) && is_ecb(cryp))
                return CR_DES_ECB;

        if (is_des(cryp) && is_cbc(cryp))
                return CR_DES_CBC;

        if (is_tdes(cryp) && is_ecb(cryp))
                return CR_TDES_ECB;

        if (is_tdes(cryp) && is_cbc(cryp))
                return CR_TDES_CBC;

        dev_err(cryp->dev, "Unknown mode\n");
        return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
        return is_encrypt(cryp) ? cryp->areq->cryptlen :
                                  cryp->areq->cryptlen - cryp->authsize;
}

static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
        int ret;
        u32 iv[4];

        /* Phase 1 : init */
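        /* Counter value 1 is reserved for the final tag; payload starts at 2 */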
        memcpy(iv, cryp->areq->iv, 12);
        iv[3] = cpu_to_be32(GCM_CTR_INIT);
        cryp->gcm_ctr = GCM_CTR_INIT;
        stm32_cryp_hw_write_iv(cryp, iv);

        stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

        /* Wait for end of processing */
        ret = stm32_cryp_wait_enable(cryp);
        if (ret)
                dev_err(cryp->dev, "Timeout (gcm init)\n");

        return ret;
}

static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
        int ret;
        u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
        u32 *d;
        unsigned int i, textlen;

        /* Phase 1 : init. First set the CTR value to 1 (not 0) */
        memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
        memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
        iv[AES_BLOCK_SIZE - 1] = 1;
        stm32_cryp_hw_write_iv(cryp, (u32 *)iv);

        /* Build B0 */
        memcpy(b0, iv, AES_BLOCK_SIZE);

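        /* B0 flags per RFC 3610: bit 6 = Adata, bits [5:3] = (tag_len - 2) / 2 */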
        b0[0] |= (8 * ((cryp->authsize - 2) / 2));

        if (cryp->areq->assoclen)
                b0[0] |= 0x40;

        textlen = stm32_cryp_get_input_text_len(cryp);

        b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
        b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

        /* Enable HW */
        stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

        /* Write B0 */
        d = (u32 *)b0;

        for (i = 0; i < AES_BLOCK_32; i++) {
                if (!cryp->caps->padding_wa)
                        *d = cpu_to_be32(*d);
                stm32_cryp_write(cryp, CRYP_DIN, *d++);
        }

        /* Wait for end of processing */
        ret = stm32_cryp_wait_enable(cryp);
        if (ret)
                dev_err(cryp->dev, "Timeout (ccm init)\n");

        return ret;
}

static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
        int ret;
        u32 cfg, hw_mode;

        pm_runtime_get_sync(cryp->dev);

        /* Disable interrupt */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);

        /* Set key */
        stm32_cryp_hw_write_key(cryp);

        /* Set configuration */
        cfg = CR_DATA8 | CR_FFLUSH;

        switch (cryp->ctx->keylen) {
        case AES_KEYSIZE_128:
                cfg |= CR_KEY128;
                break;

        case AES_KEYSIZE_192:
                cfg |= CR_KEY192;
                break;

        default:
        case AES_KEYSIZE_256:
                cfg |= CR_KEY256;
                break;
        }

        hw_mode = stm32_cryp_get_hw_mode(cryp);
        if (hw_mode == CR_AES_UNKNOWN)
                return -EINVAL;

        /* AES ECB/CBC decrypt: run key preparation first */
        if (is_decrypt(cryp) &&
            ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
                stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

                /* Wait for end of processing */
                ret = stm32_cryp_wait_busy(cryp);
                if (ret) {
                        dev_err(cryp->dev, "Timeout (key preparation)\n");
                        return ret;
                }
        }

        cfg |= hw_mode;

        if (is_decrypt(cryp))
                cfg |= CR_DEC_NOT_ENC;

        /* Apply config and flush (valid when CRYPEN = 0) */
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        switch (hw_mode) {
        case CR_AES_GCM:
        case CR_AES_CCM:
                /* Phase 1 : init */
                if (hw_mode == CR_AES_CCM)
                        ret = stm32_cryp_ccm_init(cryp, cfg);
                else
                        ret = stm32_cryp_gcm_init(cryp, cfg);

                if (ret)
                        return ret;

                /* Phase 2 : header (authenticated data) */
                if (cryp->areq->assoclen) {
                        cfg |= CR_PH_HEADER;
                } else if (stm32_cryp_get_input_text_len(cryp)) {
                        cfg |= CR_PH_PAYLOAD;
                        stm32_cryp_write(cryp, CRYP_CR, cfg);
                } else {
                        cfg |= CR_PH_INIT;
                }

                break;

        case CR_DES_CBC:
        case CR_TDES_CBC:
        case CR_AES_CBC:
        case CR_AES_CTR:
                stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info);
                break;

        default:
                break;
        }

        /* Enable now */
        cfg |= CR_CRYPEN;

        stm32_cryp_write(cryp, CRYP_CR, cfg);

        cryp->flags &= ~FLG_CCM_PADDED_WA;

        return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
        if (!err && (is_gcm(cryp) || is_ccm(cryp)))
                /* Phase 4 : output tag */
                err = stm32_cryp_read_auth_tag(cryp);

        if (cryp->sgs_copied) {
                void *buf_in, *buf_out;
                int pages, len;

                buf_in = sg_virt(&cryp->in_sgl);
                buf_out = sg_virt(&cryp->out_sgl);

                sg_copy_buf(buf_out, cryp->out_sg_save, 0,
                            cryp->total_out_save, 1);

                len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
                pages = len ? get_order(len) : 1;
                free_pages((unsigned long)buf_in, pages);

                len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
                pages = len ? get_order(len) : 1;
                free_pages((unsigned long)buf_out, pages);
        }

        pm_runtime_mark_last_busy(cryp->dev);
        pm_runtime_put_autosuspend(cryp->dev);

        if (is_gcm(cryp) || is_ccm(cryp)) {
                crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
                cryp->areq = NULL;
        } else {
                crypto_finalize_ablkcipher_request(cryp->engine, cryp->req,
                                                   err);
                cryp->req = NULL;
        }

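        /* Scrub the key material now that the request has completed */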
        memset(cryp->ctx->key, 0, cryp->ctx->keylen);

        mutex_unlock(&cryp->lock);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
        /* Enable interrupt and let the IRQ handler do everything */
        stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

        return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
                                         void *areq);

static int stm32_cryp_cra_init(struct crypto_tfm *tfm)
{
        struct stm32_cryp_ctx *ctx = crypto_tfm_ctx(tfm);

        tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx);

        ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
        ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
        ctx->enginectx.op.unprepare_request = NULL;
        return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
                                       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

        tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

        ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
        ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
        ctx->enginectx.op.unprepare_request = NULL;

        return 0;
}

static int stm32_cryp_crypt(struct ablkcipher_request *req, unsigned long mode)
{
        struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
                        crypto_ablkcipher_reqtfm(req));
        struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
        struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

        if (!cryp)
                return -ENODEV;

        rctx->mode = mode;

        return crypto_transfer_ablkcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
        struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

        if (!cryp)
                return -ENODEV;

        rctx->mode = mode;

        return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;
        else
                return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        if (keylen != DES_KEY_SIZE)
                return -EINVAL;
        else
                return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                                  unsigned int keylen)
{
        if (keylen != (3 * DES_KEY_SIZE))
                return -EINVAL;
        else
                return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
                                      unsigned int keylen)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
                                          unsigned int authsize)
{
        return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
                                          unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 6:
        case 8:
        case 10:
        case 12:
        case 14:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
                                  struct aead_request *areq)
{
        struct stm32_cryp_ctx *ctx;
        struct stm32_cryp *cryp;
        struct stm32_cryp_reqctx *rctx;
        int ret;

        if (!req && !areq)
                return -EINVAL;

        ctx = req ? crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req)) :
                    crypto_aead_ctx(crypto_aead_reqtfm(areq));

        cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        mutex_lock(&cryp->lock);

        rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
        rctx->mode &= FLG_MODE_MASK;

        ctx->cryp = cryp;

        cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
        cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
        cryp->ctx = ctx;

        if (req) {
                cryp->req = req;
                cryp->total_in = req->nbytes;
                cryp->total_out = cryp->total_in;
        } else {
                /*
                 * Length of input and output data:
                 * Encryption case:
                 *  INPUT  =   AssocData  ||   PlainText
                 *          <- assoclen ->  <- cryptlen ->
                 *          <------- total_in ----------->
                 *
                 *  OUTPUT =   AssocData  ||  CipherText  ||   AuthTag
                 *          <- assoclen ->  <- cryptlen ->  <- authsize ->
                 *          <---------------- total_out ----------------->
                 *
                 * Decryption case:
                 *  INPUT  =   AssocData  ||  CipherText  ||  AuthTag
                 *          <- assoclen ->  <--------- cryptlen --------->
                 *                                          <- authsize ->
                 *          <---------------- total_in ------------------>
                 *
                 *  OUTPUT =   AssocData  ||   PlainText
                 *          <- assoclen ->  <- cryptlen - authsize ->
                 *          <---------- total_out ----------------->
                 */
                cryp->areq = areq;
                cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
                cryp->total_in = areq->assoclen + areq->cryptlen;
                if (is_encrypt(cryp))
                        /* Append auth tag to output */
                        cryp->total_out = cryp->total_in + cryp->authsize;
                else
                        /* No auth tag in output */
                        cryp->total_out = cryp->total_in - cryp->authsize;
        }

        cryp->total_in_save = cryp->total_in;
        cryp->total_out_save = cryp->total_out;

        cryp->in_sg = req ? req->src : areq->src;
        cryp->out_sg = req ? req->dst : areq->dst;
        cryp->out_sg_save = cryp->out_sg;

        cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
        if (cryp->in_sg_len < 0) {
                dev_err(cryp->dev, "Cannot get in_sg_len\n");
                ret = cryp->in_sg_len;
                goto out;
        }

        cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
        if (cryp->out_sg_len < 0) {
                dev_err(cryp->dev, "Cannot get out_sg_len\n");
                ret = cryp->out_sg_len;
                goto out;
        }

        ret = stm32_cryp_copy_sgs(cryp);
        if (ret)
                goto out;

        scatterwalk_start(&cryp->in_walk, cryp->in_sg);
        scatterwalk_start(&cryp->out_walk, cryp->out_sg);

        if (is_gcm(cryp) || is_ccm(cryp)) {
                /* In output, jump after assoc data */
                scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
                cryp->total_out -= cryp->areq->assoclen;
        }

        ret = stm32_cryp_hw_init(cryp);
out:
        if (ret)
                mutex_unlock(&cryp->lock);

        return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
                                         void *areq)
{
        struct ablkcipher_request *req = container_of(areq,
                                                      struct ablkcipher_request,
                                                      base);

        return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
        struct ablkcipher_request *req = container_of(areq,
                                                      struct ablkcipher_request,
                                                      base);
        struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
                        crypto_ablkcipher_reqtfm(req));
        struct stm32_cryp *cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
        struct aead_request *req = container_of(areq, struct aead_request,
                                                base);

        return stm32_cryp_prepare_req(NULL, req);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
        struct aead_request *req = container_of(areq, struct aead_request,
                                                base);
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct stm32_cryp *cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        if (unlikely(!cryp->areq->assoclen &&
                     !stm32_cryp_get_input_text_len(cryp))) {
                /* No input data to process: get tag and finish */
                stm32_cryp_finish_req(cryp, 0);
                return 0;
        }

        return stm32_cryp_cpu_start(cryp);
}

static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
                                unsigned int n)
{
        scatterwalk_advance(&cryp->out_walk, n);

        if (unlikely(cryp->out_sg->length == _walked_out)) {
                cryp->out_sg = sg_next(cryp->out_sg);
                if (cryp->out_sg) {
                        scatterwalk_start(&cryp->out_walk, cryp->out_sg);
                        return (sg_virt(cryp->out_sg) + _walked_out);
                }
        }

        return (u32 *)((u8 *)dst + n);
}

static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
                               unsigned int n)
{
        scatterwalk_advance(&cryp->in_walk, n);

        if (unlikely(cryp->in_sg->length == _walked_in)) {
                cryp->in_sg = sg_next(cryp->in_sg);
                if (cryp->in_sg) {
                        scatterwalk_start(&cryp->in_walk, cryp->in_sg);
                        return (sg_virt(cryp->in_sg) + _walked_in);
                }
        }

        return (u32 *)((u8 *)src + n);
}

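/*
 * Phase 4 (final): for GCM feed the bit lengths of AAD and payload, for
 * CCM feed CTR0; then read back the tag (encrypt) or compare it with the
 * one found at the end of the input data (decrypt).
 */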
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
        u32 cfg, size_bit, *dst, d32;
        u8 *d8;
        unsigned int i, j;
        int ret = 0;

        /* Update Config */
        cfg = stm32_cryp_read(cryp, CRYP_CR);

        cfg &= ~CR_PH_MASK;
        cfg |= CR_PH_FINAL;
        cfg &= ~CR_DEC_NOT_ENC;
        cfg |= CR_CRYPEN;

        stm32_cryp_write(cryp, CRYP_CR, cfg);

        if (is_gcm(cryp)) {
                /* GCM: write aad and payload size (in bits) */
                size_bit = cryp->areq->assoclen * 8;
                if (cryp->caps->swap_final)
                        size_bit = cpu_to_be32(size_bit);

                stm32_cryp_write(cryp, CRYP_DIN, 0);
                stm32_cryp_write(cryp, CRYP_DIN, size_bit);

                size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
                                cryp->areq->cryptlen - AES_BLOCK_SIZE;
                size_bit *= 8;
                if (cryp->caps->swap_final)
                        size_bit = cpu_to_be32(size_bit);

                stm32_cryp_write(cryp, CRYP_DIN, 0);
                stm32_cryp_write(cryp, CRYP_DIN, size_bit);
        } else {
                /* CCM: write CTR0 */
                u8 iv[AES_BLOCK_SIZE];
                u32 *iv32 = (u32 *)iv;

                memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
                memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

                for (i = 0; i < AES_BLOCK_32; i++) {
                        if (!cryp->caps->padding_wa)
                                *iv32 = cpu_to_be32(*iv32);
                        stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
                }
        }

        /* Wait for output data */
        ret = stm32_cryp_wait_output(cryp);
        if (ret) {
                dev_err(cryp->dev, "Timeout (read tag)\n");
                return ret;
        }

        if (is_encrypt(cryp)) {
                /* Get and write tag */
                dst = sg_virt(cryp->out_sg) + _walked_out;

                for (i = 0; i < AES_BLOCK_32; i++) {
                        if (cryp->total_out >= sizeof(u32)) {
                                /* Read a full u32 */
                                *dst = stm32_cryp_read(cryp, CRYP_DOUT);

                                dst = stm32_cryp_next_out(cryp, dst,
                                                          sizeof(u32));
                                cryp->total_out -= sizeof(u32);
                        } else if (!cryp->total_out) {
                                /* Empty fifo out (data from input padding) */
                                stm32_cryp_read(cryp, CRYP_DOUT);
                        } else {
                                /* Read less than a u32 */
                                d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                                d8 = (u8 *)&d32;

                                for (j = 0; j < cryp->total_out; j++) {
                                        *((u8 *)dst) = *(d8++);
                                        dst = stm32_cryp_next_out(cryp, dst, 1);
                                }
                                cryp->total_out = 0;
                        }
                }
        } else {
                /* Get and check tag */
                u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

                scatterwalk_map_and_copy(in_tag, cryp->in_sg,
                                         cryp->total_in_save - cryp->authsize,
                                         cryp->authsize, 0);

                for (i = 0; i < AES_BLOCK_32; i++)
                        out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

                if (crypto_memneq(in_tag, out_tag, cryp->authsize))
                        ret = -EBADMSG;
        }

        /* Disable cryp */
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        return ret;
}

static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
        u32 cr;

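        /*
         * The peripheral increments only the 32 LSBs of the counter: when
         * the low word is about to wrap, propagate the carry to the upper
         * words in software and reload the IV registers.
         */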
        if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
                cryp->last_ctr[3] = 0;
                cryp->last_ctr[2]++;
                if (!cryp->last_ctr[2]) {
                        cryp->last_ctr[1]++;
                        if (!cryp->last_ctr[1])
                                cryp->last_ctr[0]++;
                }

                cr = stm32_cryp_read(cryp, CRYP_CR);
                stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

                stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

                stm32_cryp_write(cryp, CRYP_CR, cr);
        }

        cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
        cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
        cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
        cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}

static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
        unsigned int i, j;
        u32 d32, *dst;
        u8 *d8;
        size_t tag_size;

        /* Do not read the tag now (if any) */
        if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
                tag_size = cryp->authsize;
        else
                tag_size = 0;

        dst = sg_virt(cryp->out_sg) + _walked_out;

        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
                if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
                        /* Read a full u32 */
                        *dst = stm32_cryp_read(cryp, CRYP_DOUT);

                        dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
                        cryp->total_out -= sizeof(u32);
                } else if (cryp->total_out == tag_size) {
                        /* Empty fifo out (data from input padding) */
                        d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                } else {
                        /* Read less than a u32 */
                        d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                        d8 = (u8 *)&d32;

                        for (j = 0; j < cryp->total_out - tag_size; j++) {
                                *((u8 *)dst) = *(d8++);
                                dst = stm32_cryp_next_out(cryp, dst, 1);
                        }
                        cryp->total_out = tag_size;
                }
        }

        return !(cryp->total_out - tag_size) || !cryp->total_in;
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
        unsigned int i, j;
        u32 *src;
        u8 d8[4];
        size_t tag_size;

        /* Do not write the tag (if any) */
        if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
                tag_size = cryp->authsize;
        else
                tag_size = 0;

        src = sg_virt(cryp->in_sg) + _walked_in;

        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
                if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
                        /* Write a full u32 */
                        stm32_cryp_write(cryp, CRYP_DIN, *src);

                        src = stm32_cryp_next_in(cryp, src, sizeof(u32));
                        cryp->total_in -= sizeof(u32);
                } else if (cryp->total_in == tag_size) {
                        /* Write padding data */
                        stm32_cryp_write(cryp, CRYP_DIN, 0);
                } else {
                        /* Write less than a u32 */
                        memset(d8, 0, sizeof(u32));
                        for (j = 0; j < cryp->total_in - tag_size; j++) {
                                d8[j] = *((u8 *)src);
                                src = stm32_cryp_next_in(cryp, src, 1);
                        }

                        stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
                        cryp->total_in = tag_size;
                }
        }
}

static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
        int err;
        u32 cfg, tmp[AES_BLOCK_32];
        size_t total_in_ori = cryp->total_in;
        struct scatterlist *out_sg_ori = cryp->out_sg;
        unsigned int i;

        /* 'Special workaround' procedure described in the datasheet */
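        /*
         * (The last, incomplete block is ciphered in CTR mode with the
         * current counter, then the result is re-injected, zero-padded,
         * into the GCM final phase so that the tag accounts for it.)
         */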

        /* a) disable ip */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);
        cfg = stm32_cryp_read(cryp, CRYP_CR);
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) Update IV1R */
        stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

        /* c) change mode to CTR */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_CTR;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* a) enable IP */
        cfg |= CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) pad and write the last block */
        stm32_cryp_irq_write_block(cryp);
        cryp->total_in = total_in_ori;
        err = stm32_cryp_wait_output(cryp);
        if (err) {
                dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
                return stm32_cryp_finish_req(cryp, err);
        }

        /* c) get and store encrypted data */
        stm32_cryp_irq_read_data(cryp);
        scatterwalk_map_and_copy(tmp, out_sg_ori,
                                 cryp->total_in_save - total_in_ori,
                                 total_in_ori, 0);

        /* d) change mode back to AES GCM */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_GCM;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* e) change phase to Final */
        cfg &= ~CR_PH_MASK;
        cfg |= CR_PH_FINAL;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* f) write padded data */
        for (i = 0; i < AES_BLOCK_32; i++) {
                if (cryp->total_in)
                        stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
                else
                        stm32_cryp_write(cryp, CRYP_DIN, 0);

                cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
        }

        /* g) Empty fifo out */
        err = stm32_cryp_wait_output(cryp);
        if (err) {
                dev_err(cryp->dev, "Timeout (empty gcm fifo out)\n");
                return stm32_cryp_finish_req(cryp, err);
        }

        for (i = 0; i < AES_BLOCK_32; i++)
                stm32_cryp_read(cryp, CRYP_DOUT);

        /* h) run the normal Final phase */
        stm32_cryp_finish_req(cryp, 0);
}

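/*
 * Newer CRYP revisions handle the padding themselves: NPBLB holds the
 * number of padding bytes in the last block so the hardware can exclude
 * them from the tag computation.
 */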
static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
        u32 cfg, payload_bytes;

        /* Disable ip, set NPBLB and re-enable ip */
        cfg = stm32_cryp_read(cryp, CRYP_CR);
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
                                           cryp->total_in;
        cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NPBLB_SHIFT;
        cfg |= CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);
}

static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
        int err = 0;
        u32 cfg, iv1tmp;
        u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
        size_t last_total_out, total_in_ori = cryp->total_in;
        struct scatterlist *out_sg_ori = cryp->out_sg;
        unsigned int i;

        /* 'Special workaround' procedure described in the datasheet */
        cryp->flags |= FLG_CCM_PADDED_WA;

        /* a) disable ip */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);

        cfg = stm32_cryp_read(cryp, CRYP_CR);
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) get IV1 from CRYP_CSGCMCCM7 */
        iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

        /* c) Load CRYP_CSGCMCCMxR */
        for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
                cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

        /* d) Write IV1R */
        stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

        /* e) change mode to CTR */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_CTR;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* a) enable IP */
        cfg |= CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) pad and write the last block */
        stm32_cryp_irq_write_block(cryp);
        cryp->total_in = total_in_ori;
        err = stm32_cryp_wait_output(cryp);
        if (err) {
                dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
                return stm32_cryp_finish_req(cryp, err);
        }

        /* c) get and store decrypted data */
        last_total_out = cryp->total_out;
        stm32_cryp_irq_read_data(cryp);

        memset(tmp, 0, sizeof(tmp));
        scatterwalk_map_and_copy(tmp, out_sg_ori,
                                 cryp->total_out_save - last_total_out,
                                 last_total_out, 0);

        /* d) Load again CRYP_CSGCMCCMxR */
        for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
                cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

        /* e) change mode back to AES CCM */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_CCM;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* f) change phase to header */
        cfg &= ~CR_PH_MASK;
        cfg |= CR_PH_HEADER;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* g) XOR and write padded data */
        for (i = 0; i < ARRAY_SIZE(tmp); i++) {
                tmp[i] ^= cstmp1[i];
                tmp[i] ^= cstmp2[i];
                stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
        }

        /* h) wait for completion */
        err = stm32_cryp_wait_busy(cryp);
        if (err)
                dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

        /* i) run the normal Final phase */
        stm32_cryp_finish_req(cryp, err);
}

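/*
 * Push the next input block into the FIFO; a last, incomplete GCM/CCM
 * block is first routed through the padding handling (software
 * workaround or NPBLB, depending on the hardware revision).
 */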
static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
        if (unlikely(!cryp->total_in)) {
                dev_warn(cryp->dev, "No more data to process\n");
                return;
        }

        if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
                     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
                     is_encrypt(cryp))) {
                /* Padding for AES GCM encryption */
                if (cryp->caps->padding_wa)
                        /* Special case 1 */
                        return stm32_cryp_irq_write_gcm_padded_data(cryp);

                /* Setting padding bytes (NPBLB) */
                stm32_cryp_irq_set_npblb(cryp);
        }

        if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
                     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
                     is_decrypt(cryp))) {
                /* Padding for AES CCM decryption */
                if (cryp->caps->padding_wa)
                        /* Special case 2 */
                        return stm32_cryp_irq_write_ccm_padded_data(cryp);

                /* Setting padding bytes (NPBLB) */
                stm32_cryp_irq_set_npblb(cryp);
        }

        if (is_aes(cryp) && is_ctr(cryp))
                stm32_cryp_check_ctr_counter(cryp);

        stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
{
        int err;
        unsigned int i, j;
        u32 cfg, *src;

        src = sg_virt(cryp->in_sg) + _walked_in;

        for (i = 0; i < AES_BLOCK_32; i++) {
                stm32_cryp_write(cryp, CRYP_DIN, *src);

                src = stm32_cryp_next_in(cryp, src, sizeof(u32));
                cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);

                /* Check if whole header written */
                if ((cryp->total_in_save - cryp->total_in) ==
                                cryp->areq->assoclen) {
                        /* Write padding if needed */
                        for (j = i + 1; j < AES_BLOCK_32; j++)
                                stm32_cryp_write(cryp, CRYP_DIN, 0);

                        /* Wait for completion */
                        err = stm32_cryp_wait_busy(cryp);
                        if (err) {
                                dev_err(cryp->dev, "Timeout (gcm header)\n");
                                return stm32_cryp_finish_req(cryp, err);
                        }

                        if (stm32_cryp_get_input_text_len(cryp)) {
                                /* Phase 3 : payload */
                                cfg = stm32_cryp_read(cryp, CRYP_CR);
                                cfg &= ~CR_CRYPEN;
                                stm32_cryp_write(cryp, CRYP_CR, cfg);

                                cfg &= ~CR_PH_MASK;
                                cfg |= CR_PH_PAYLOAD;
                                cfg |= CR_CRYPEN;
                                stm32_cryp_write(cryp, CRYP_CR, cfg);
                        } else {
                                /* Phase 4 : tag */
                                stm32_cryp_write(cryp, CRYP_IMSCR, 0);
                                stm32_cryp_finish_req(cryp, 0);
                        }

                        break;
                }

                if (!cryp->total_in)
                        break;
        }
}

static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp)
{
        int err;
        unsigned int i = 0, j, k;
        u32 alen, cfg, *src;
        u8 d8[4];

        src = sg_virt(cryp->in_sg) + _walked_in;
        alen = cryp->areq->assoclen;

        if (!_walked_in) {
1586                 if (alen <= 65280) {
1587                         /* Write first u32 of B1 */
1588                         d8[0] = (alen >> 8) & 0xFF;
1589                         d8[1] = alen & 0xFF;
1590                         d8[2] = *((u8 *)src);
1591                         src = stm32_cryp_next_in(cryp, src, 1);
1592                         d8[3] = *((u8 *)src);
1593                         src = stm32_cryp_next_in(cryp, src, 1);
1594
1595                         stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
1596                         i++;
1597
1598                         cryp->total_in -= min_t(size_t, 2, cryp->total_in);
1599                 } else {
1600                         /* Build the first two u32 of B1 */
1601                         d8[0] = 0xFF;
1602                         d8[1] = 0xFE;
1603                         d8[2] = (alen & 0xFF000000) >> 24;
1604                         d8[3] = (alen & 0x00FF0000) >> 16;
1605
1606                         stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
1607                         i++;
1608
1609                         d8[0] = (alen & 0x0000FF00) >> 8;
1610                         d8[1] = alen & 0x000000FF;
1611                         d8[2] = *((u8 *)src);
1612                         src = stm32_cryp_next_in(cryp, src, 1);
1613                         d8[3] = *((u8 *)src);
1614                         src = stm32_cryp_next_in(cryp, src, 1);
1615
1616                         stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
1617                         i++;
1618
1619                         cryp->total_in -= min_t(size_t, 2, cryp->total_in);
1620                 }
1621         }
1622
1623         /* Write next u32 */
1624         for (; i < AES_BLOCK_32; i++) {
1625                 /* Build a u32 */
1626                 memset(d8, 0, sizeof(u32));
1627                 for (k = 0; k < sizeof(u32); k++) {
1628                         d8[k] = *((u8 *)src);
1629                         src = stm32_cryp_next_in(cryp, src, 1);
1630
1631                         cryp->total_in -= min_t(size_t, 1, cryp->total_in);
1632                         if ((cryp->total_in_save - cryp->total_in) == alen)
1633                                 break;
1634                 }
1635
1636                 stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
1637
1638                 if ((cryp->total_in_save - cryp->total_in) == alen) {
1639                         /* Write padding if needed */
1640                         for (j = i + 1; j < AES_BLOCK_32; j++)
1641                                 stm32_cryp_write(cryp, CRYP_DIN, 0);
1642
1643                         /* Wait for completion */
1644                         err = stm32_cryp_wait_busy(cryp);
1645                         if (err) {
1646                                 dev_err(cryp->dev, "Timeout (ccm header)\n");
1647                                 return stm32_cryp_finish_req(cryp, err);
1648                         }
1649
1650                         if (stm32_cryp_get_input_text_len(cryp)) {
1651                                 /* Phase 3: payload */
1652                                 cfg = stm32_cryp_read(cryp, CRYP_CR);
1653                                 cfg &= ~CR_CRYPEN;
1654                                 stm32_cryp_write(cryp, CRYP_CR, cfg);
1655
1656                                 cfg &= ~CR_PH_MASK;
1657                                 cfg |= CR_PH_PAYLOAD;
1658                                 cfg |= CR_CRYPEN;
1659                                 stm32_cryp_write(cryp, CRYP_CR, cfg);
1660                         } else {
1661                                 /* Phase 4: tag */
1662                                 stm32_cryp_write(cryp, CRYP_IMSCR, 0);
1663                                 stm32_cryp_finish_req(cryp, 0);
1664                         }
1665
1666                         break;
1667                 }
1668         }
1669 }
1670
1671 static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
1672 {
1673         struct stm32_cryp *cryp = arg;
1674         u32 ph;
1675
1676         if (cryp->irq_status & MISR_OUT)
1677                 /* Output FIFO IRQ: read data */
1678                 if (unlikely(stm32_cryp_irq_read_data(cryp))) {
1679                         /* All bytes processed, finish */
1680                         stm32_cryp_write(cryp, CRYP_IMSCR, 0);
1681                         stm32_cryp_finish_req(cryp, 0);
1682                         return IRQ_HANDLED;
1683                 }
1684
1685         if (cryp->irq_status & MISR_IN) {
1686                 if (is_gcm(cryp)) {
1687                         ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
1688                         if (unlikely(ph == CR_PH_HEADER))
1689                                 /* Write Header */
1690                                 stm32_cryp_irq_write_gcm_header(cryp);
1691                         else
1692                                 /* Input FIFO IRQ: write data */
1693                                 stm32_cryp_irq_write_data(cryp);
1694                         cryp->gcm_ctr++;
1695                 } else if (is_ccm(cryp)) {
1696                         ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
1697                         if (unlikely(ph == CR_PH_HEADER))
1698                                 /* Write Header */
1699                                 stm32_cryp_irq_write_ccm_header(cryp);
1700                         else
1701                                 /* Input FIFO IRQ: write data */
1702                                 stm32_cryp_irq_write_data(cryp);
1703                 } else {
1704                         /* Input FIFO IRQ: write data */
1705                         stm32_cryp_irq_write_data(cryp);
1706                 }
1707         }
1708
1709         return IRQ_HANDLED;
1710 }
1711
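/*
 * Hard IRQ handler: just latch the masked interrupt status here; the
 * actual FIFO draining and phase handling run in the threaded handler
 * above.
 */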
1712 static irqreturn_t stm32_cryp_irq(int irq, void *arg)
1713 {
1714         struct stm32_cryp *cryp = arg;
1715
1716         cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);
1717
1718         return IRQ_WAKE_THREAD;
1719 }
1720
1721 static struct crypto_alg crypto_algs[] = {
1722 {
1723         .cra_name               = "ecb(aes)",
1724         .cra_driver_name        = "stm32-ecb-aes",
1725         .cra_priority           = 200,
1726         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1727                                   CRYPTO_ALG_ASYNC,
1728         .cra_blocksize          = AES_BLOCK_SIZE,
1729         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1730         .cra_alignmask          = 0xf,
1731         .cra_type               = &crypto_ablkcipher_type,
1732         .cra_module             = THIS_MODULE,
1733         .cra_init               = stm32_cryp_cra_init,
1734         .cra_ablkcipher = {
1735                 .min_keysize    = AES_MIN_KEY_SIZE,
1736                 .max_keysize    = AES_MAX_KEY_SIZE,
1737                 .setkey         = stm32_cryp_aes_setkey,
1738                 .encrypt        = stm32_cryp_aes_ecb_encrypt,
1739                 .decrypt        = stm32_cryp_aes_ecb_decrypt,
1740         }
1741 },
1742 {
1743         .cra_name               = "cbc(aes)",
1744         .cra_driver_name        = "stm32-cbc-aes",
1745         .cra_priority           = 200,
1746         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1747                                   CRYPTO_ALG_ASYNC,
1748         .cra_blocksize          = AES_BLOCK_SIZE,
1749         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1750         .cra_alignmask          = 0xf,
1751         .cra_type               = &crypto_ablkcipher_type,
1752         .cra_module             = THIS_MODULE,
1753         .cra_init               = stm32_cryp_cra_init,
1754         .cra_ablkcipher = {
1755                 .min_keysize    = AES_MIN_KEY_SIZE,
1756                 .max_keysize    = AES_MAX_KEY_SIZE,
1757                 .ivsize         = AES_BLOCK_SIZE,
1758                 .setkey         = stm32_cryp_aes_setkey,
1759                 .encrypt        = stm32_cryp_aes_cbc_encrypt,
1760                 .decrypt        = stm32_cryp_aes_cbc_decrypt,
1761         }
1762 },
1763 {
1764         .cra_name               = "ctr(aes)",
1765         .cra_driver_name        = "stm32-ctr-aes",
1766         .cra_priority           = 200,
1767         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1768                                   CRYPTO_ALG_ASYNC,
1769         .cra_blocksize          = 1,
1770         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1771         .cra_alignmask          = 0xf,
1772         .cra_type               = &crypto_ablkcipher_type,
1773         .cra_module             = THIS_MODULE,
1774         .cra_init               = stm32_cryp_cra_init,
1775         .cra_ablkcipher = {
1776                 .min_keysize    = AES_MIN_KEY_SIZE,
1777                 .max_keysize    = AES_MAX_KEY_SIZE,
1778                 .ivsize         = AES_BLOCK_SIZE,
1779                 .setkey         = stm32_cryp_aes_setkey,
1780                 .encrypt        = stm32_cryp_aes_ctr_encrypt,
1781                 .decrypt        = stm32_cryp_aes_ctr_decrypt,
1782         }
1783 },
1784 {
1785         .cra_name               = "ecb(des)",
1786         .cra_driver_name        = "stm32-ecb-des",
1787         .cra_priority           = 200,
1788         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1789                                   CRYPTO_ALG_ASYNC,
1790         .cra_blocksize          = DES_BLOCK_SIZE,
1791         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1792         .cra_alignmask          = 0xf,
1793         .cra_type               = &crypto_ablkcipher_type,
1794         .cra_module             = THIS_MODULE,
1795         .cra_init               = stm32_cryp_cra_init,
1796         .cra_ablkcipher = {
1797                 .min_keysize    = DES_KEY_SIZE,
1798                 .max_keysize    = DES_KEY_SIZE,
1799                 .setkey         = stm32_cryp_des_setkey,
1800                 .encrypt        = stm32_cryp_des_ecb_encrypt,
1801                 .decrypt        = stm32_cryp_des_ecb_decrypt,
1802         }
1803 },
1804 {
1805         .cra_name               = "cbc(des)",
1806         .cra_driver_name        = "stm32-cbc-des",
1807         .cra_priority           = 200,
1808         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1809                                   CRYPTO_ALG_ASYNC,
1810         .cra_blocksize          = DES_BLOCK_SIZE,
1811         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1812         .cra_alignmask          = 0xf,
1813         .cra_type               = &crypto_ablkcipher_type,
1814         .cra_module             = THIS_MODULE,
1815         .cra_init               = stm32_cryp_cra_init,
1816         .cra_ablkcipher = {
1817                 .min_keysize    = DES_KEY_SIZE,
1818                 .max_keysize    = DES_KEY_SIZE,
1819                 .ivsize         = DES_BLOCK_SIZE,
1820                 .setkey         = stm32_cryp_des_setkey,
1821                 .encrypt        = stm32_cryp_des_cbc_encrypt,
1822                 .decrypt        = stm32_cryp_des_cbc_decrypt,
1823         }
1824 },
1825 {
1826         .cra_name               = "ecb(des3_ede)",
1827         .cra_driver_name        = "stm32-ecb-des3",
1828         .cra_priority           = 200,
1829         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1830                                   CRYPTO_ALG_ASYNC,
1831         .cra_blocksize          = DES_BLOCK_SIZE,
1832         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1833         .cra_alignmask          = 0xf,
1834         .cra_type               = &crypto_ablkcipher_type,
1835         .cra_module             = THIS_MODULE,
1836         .cra_init               = stm32_cryp_cra_init,
1837         .cra_ablkcipher = {
1838                 .min_keysize    = DES3_EDE_KEY_SIZE,
1839                 .max_keysize    = DES3_EDE_KEY_SIZE,
1840                 .setkey         = stm32_cryp_tdes_setkey,
1841                 .encrypt        = stm32_cryp_tdes_ecb_encrypt,
1842                 .decrypt        = stm32_cryp_tdes_ecb_decrypt,
1843         }
1844 },
1845 {
1846         .cra_name               = "cbc(des3_ede)",
1847         .cra_driver_name        = "stm32-cbc-des3",
1848         .cra_priority           = 200,
1849         .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
1850                                   CRYPTO_ALG_ASYNC,
1851         .cra_blocksize          = DES_BLOCK_SIZE,
1852         .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1853         .cra_alignmask          = 0xf,
1854         .cra_type               = &crypto_ablkcipher_type,
1855         .cra_module             = THIS_MODULE,
1856         .cra_init               = stm32_cryp_cra_init,
1857         .cra_ablkcipher = {
1858                 .min_keysize    = DES3_EDE_KEY_SIZE,
1859                 .max_keysize    = DES3_EDE_KEY_SIZE,
1860                 .ivsize         = DES_BLOCK_SIZE,
1861                 .setkey         = stm32_cryp_tdes_setkey,
1862                 .encrypt        = stm32_cryp_tdes_cbc_encrypt,
1863                 .decrypt        = stm32_cryp_tdes_cbc_decrypt,
1864         }
1865 },
1866 };
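/*
 * Illustrative sketch only (not part of this driver): one way a kernel
 * user could reach the "cbc(aes)" ablkcipher registered above is through
 * the generic skcipher API, which layers on top of ablkcipher algorithms
 * in this kernel. The function name, key, IV and data below are
 * placeholders; assumes <crypto/skcipher.h> is included.
 */
#if 0
static int stm32_cryp_example_cbc_aes(void)
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        u8 key[AES_KEYSIZE_128] = { 0 };        /* placeholder key */
        u8 iv[AES_BLOCK_SIZE] = { 0 };          /* placeholder IV */
        u8 buf[AES_BLOCK_SIZE] = { 0 };         /* one block, in place */
        int ret;

        tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_skcipher_setkey(tfm, key, sizeof(key));
        if (ret)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                ret = -ENOMEM;
                goto out_free_tfm;
        }

        sg_init_one(&sg, buf, sizeof(buf));
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
                                      CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, &sg, &sg, sizeof(buf), iv);

        /* The request completes asynchronously via the crypto engine. */
        ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return ret;
}
#endif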
1867
1868 static struct aead_alg aead_algs[] = {
1869 {
1870         .setkey         = stm32_cryp_aes_aead_setkey,
1871         .setauthsize    = stm32_cryp_aes_gcm_setauthsize,
1872         .encrypt        = stm32_cryp_aes_gcm_encrypt,
1873         .decrypt        = stm32_cryp_aes_gcm_decrypt,
1874         .init           = stm32_cryp_aes_aead_init,
1875         .ivsize         = 12,
1876         .maxauthsize    = AES_BLOCK_SIZE,
1877
1878         .base = {
1879                 .cra_name               = "gcm(aes)",
1880                 .cra_driver_name        = "stm32-gcm-aes",
1881                 .cra_priority           = 200,
1882                 .cra_flags              = CRYPTO_ALG_ASYNC,
1883                 .cra_blocksize          = 1,
1884                 .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1885                 .cra_alignmask          = 0xf,
1886                 .cra_module             = THIS_MODULE,
1887         },
1888 },
1889 {
1890         .setkey         = stm32_cryp_aes_aead_setkey,
1891         .setauthsize    = stm32_cryp_aes_ccm_setauthsize,
1892         .encrypt        = stm32_cryp_aes_ccm_encrypt,
1893         .decrypt        = stm32_cryp_aes_ccm_decrypt,
1894         .init           = stm32_cryp_aes_aead_init,
1895         .ivsize         = AES_BLOCK_SIZE,
1896         .maxauthsize    = AES_BLOCK_SIZE,
1897
1898         .base = {
1899                 .cra_name               = "ccm(aes)",
1900                 .cra_driver_name        = "stm32-ccm-aes",
1901                 .cra_priority           = 200,
1902                 .cra_flags              = CRYPTO_ALG_ASYNC,
1903                 .cra_blocksize          = 1,
1904                 .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
1905                 .cra_alignmask          = 0xf,
1906                 .cra_module             = THIS_MODULE,
1907         },
1908 },
1909 };
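/*
 * Illustrative sketch only (not part of this driver): one authenticated
 * encryption through the "gcm(aes)" implementation above, using the
 * generic AEAD API (<crypto/aead.h>). Per the AEAD convention, the
 * scatterlist carries the associated data first, then the plaintext,
 * with room for the tag on output; the function name, key, nonce and
 * sizes below are placeholders.
 */
#if 0
static int stm32_cryp_example_gcm_aes(void)
{
        struct crypto_aead *tfm;
        struct aead_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        u8 key[AES_KEYSIZE_128] = { 0 };        /* placeholder key */
        u8 iv[12] = { 0 };                      /* 96-bit GCM nonce */
        u8 buf[48] = { 0 };                     /* 16 AAD + 16 PT + 16 tag */
        int ret;

        tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_aead_setkey(tfm, key, sizeof(key));
        if (ret)
                goto out_free_tfm;

        ret = crypto_aead_setauthsize(tfm, AES_BLOCK_SIZE);
        if (ret)
                goto out_free_tfm;

        req = aead_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                ret = -ENOMEM;
                goto out_free_tfm;
        }

        sg_init_one(&sg, buf, sizeof(buf));
        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
                                  CRYPTO_TFM_REQ_MAY_SLEEP,
                                  crypto_req_done, &wait);
        aead_request_set_ad(req, 16);           /* 16 bytes of AAD */
        /* cryptlen is the plaintext length; the tag is appended on output */
        aead_request_set_crypt(req, &sg, &sg, 16, iv);

        ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);

        aead_request_free(req);
out_free_tfm:
        crypto_free_aead(tfm);
        return ret;
}
#endif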
1910
1911 static const struct stm32_cryp_caps f7_data = {
1912         .swap_final = true,
1913         .padding_wa = true,
1914 };
1915
1916 static const struct stm32_cryp_caps mp1_data = {
1917         .swap_final = false,
1918         .padding_wa = false,
1919 };
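/*
 * Capability flags per IP generation: padding_wa selects the padded-data
 * "special case" paths used by the input-FIFO handlers above when the
 * last GCM/CCM block is short; swap_final indicates the computed tag
 * must be byte-swapped when read back. Only the F7 generation of the IP
 * needs both workarounds.
 */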
1920
1921 static const struct of_device_id stm32_dt_ids[] = {
1922         { .compatible = "st,stm32f756-cryp", .data = &f7_data},
1923         { .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
1924         {},
1925 };
1926 MODULE_DEVICE_TABLE(of, stm32_dt_ids);
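/*
 * Illustrative device tree node matching the table above; the unit
 * address, interrupt and clock/reset specifiers are board-specific
 * placeholders (see the st,stm32-cryp binding document under
 * Documentation/devicetree/bindings/crypto/):
 *
 *      cryp: cryp@54001000 {
 *              compatible = "st,stm32mp1-cryp";
 *              reg = <0x54001000 0x400>;
 *              interrupts = <GIC_SPI 79 IRQ_TYPE_LEVEL_HIGH>;
 *              clocks = <&rcc CRYP1>;
 *              resets = <&rcc CRYP1_R>;
 *      };
 */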
1927
1928 static int stm32_cryp_probe(struct platform_device *pdev)
1929 {
1930         struct device *dev = &pdev->dev;
1931         struct stm32_cryp *cryp;
1932         struct resource *res;
1933         struct reset_control *rst;
1934         int irq, ret;
1935
1936         cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
1937         if (!cryp)
1938                 return -ENOMEM;
1939
1940         cryp->caps = of_device_get_match_data(dev);
1941         if (!cryp->caps)
1942                 return -ENODEV;
1943
1944         cryp->dev = dev;
1945
1946         mutex_init(&cryp->lock);
1947
1948         res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1949         cryp->regs = devm_ioremap_resource(dev, res);
1950         if (IS_ERR(cryp->regs))
1951                 return PTR_ERR(cryp->regs);
1952
1953         irq = platform_get_irq(pdev, 0);
1954         if (irq < 0) {
1955                 dev_err(dev, "Cannot get IRQ resource\n");
1956                 return irq;
1957         }
1958
1959         ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
1960                                         stm32_cryp_irq_thread, IRQF_ONESHOT,
1961                                         dev_name(dev), cryp);
1962         if (ret) {
1963                 dev_err(dev, "Cannot grab IRQ\n");
1964                 return ret;
1965         }
1966
1967         cryp->clk = devm_clk_get(dev, NULL);
1968         if (IS_ERR(cryp->clk)) {
1969                 dev_err(dev, "Could not get clock\n");
1970                 return PTR_ERR(cryp->clk);
1971         }
1972
1973         ret = clk_prepare_enable(cryp->clk);
1974         if (ret) {
1975                 dev_err(cryp->dev, "Failed to enable clock\n");
1976                 return ret;
1977         }
1978
1979         pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
1980         pm_runtime_use_autosuspend(dev);
1981
1982         pm_runtime_get_noresume(dev);
1983         pm_runtime_set_active(dev);
1984         pm_runtime_enable(dev);
1985
1986         rst = devm_reset_control_get(dev, NULL);
1987         if (!IS_ERR(rst)) {
1988                 reset_control_assert(rst);
1989                 udelay(2);
1990                 reset_control_deassert(rst);
1991         }
1992
1993         platform_set_drvdata(pdev, cryp);
1994
1995         spin_lock(&cryp_list.lock);
1996         list_add(&cryp->list, &cryp_list.dev_list);
1997         spin_unlock(&cryp_list.lock);
1998
1999         /* Initialize crypto engine */
2000         cryp->engine = crypto_engine_alloc_init(dev, 1);
2001         if (!cryp->engine) {
2002                 dev_err(dev, "Could not init crypto engine\n");
2003                 ret = -ENOMEM;
2004                 goto err_engine1;
2005         }
2006
2007         ret = crypto_engine_start(cryp->engine);
2008         if (ret) {
2009                 dev_err(dev, "Could not start crypto engine\n");
2010                 goto err_engine2;
2011         }
2012
2013         ret = crypto_register_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
2014         if (ret) {
2015                 dev_err(dev, "Could not register algs\n");
2016                 goto err_algs;
2017         }
2018
2019         ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
2020         if (ret)
2021                 goto err_aead_algs;
2022
2023         dev_info(dev, "Initialized\n");
2024
2025         pm_runtime_put_sync(dev);
2026
2027         return 0;
2028
2029 err_aead_algs:
2030         crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
2031 err_algs:
2032 err_engine2:
2033         crypto_engine_exit(cryp->engine);
2034 err_engine1:
2035         spin_lock(&cryp_list.lock);
2036         list_del(&cryp->list);
2037         spin_unlock(&cryp_list.lock);
2038
2039         pm_runtime_disable(dev);
2040         pm_runtime_put_noidle(dev);
2041
2042         clk_disable_unprepare(cryp->clk);
2043
2044         return ret;
2045 }
2046
2047 static int stm32_cryp_remove(struct platform_device *pdev)
2048 {
2049         struct stm32_cryp *cryp = platform_get_drvdata(pdev);
2050         int ret;
2051
2052         if (!cryp)
2053                 return -ENODEV;
2054
2055         ret = pm_runtime_get_sync(cryp->dev);
2056         if (ret < 0)
2057                 return ret;
2058
2059         crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
2060         crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
2061
2062         crypto_engine_exit(cryp->engine);
2063
2064         spin_lock(&cryp_list.lock);
2065         list_del(&cryp->list);
2066         spin_unlock(&cryp_list.lock);
2067
2068         pm_runtime_disable(cryp->dev);
2069         pm_runtime_put_noidle(cryp->dev);
2070
2071         clk_disable_unprepare(cryp->clk);
2072
2073         return 0;
2074 }
2075
2076 #ifdef CONFIG_PM
2077 static int stm32_cryp_runtime_suspend(struct device *dev)
2078 {
2079         struct stm32_cryp *cryp = dev_get_drvdata(dev);
2080
2081         clk_disable_unprepare(cryp->clk);
2082
2083         return 0;
2084 }
2085
2086 static int stm32_cryp_runtime_resume(struct device *dev)
2087 {
2088         struct stm32_cryp *cryp = dev_get_drvdata(dev);
2089         int ret;
2090
2091         ret = clk_prepare_enable(cryp->clk);
2092         if (ret) {
2093                 dev_err(cryp->dev, "Failed to prepare_enable clock\n");
2094                 return ret;
2095         }
2096
2097         return 0;
2098 }
2099 #endif
2100
2101 static const struct dev_pm_ops stm32_cryp_pm_ops = {
2102         SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
2103                                 pm_runtime_force_resume)
2104         SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
2105                            stm32_cryp_runtime_resume, NULL)
2106 };
2107
2108 static struct platform_driver stm32_cryp_driver = {
2109         .probe  = stm32_cryp_probe,
2110         .remove = stm32_cryp_remove,
2111         .driver = {
2112                 .name           = DRIVER_NAME,
2113                 .pm             = &stm32_cryp_pm_ops,
2114                 .of_match_table = stm32_dt_ids,
2115         },
2116 };
2117
2118 module_platform_driver(stm32_cryp_driver);
2119
2120 MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
2121 MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
2122 MODULE_LICENSE("GPL");