// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_SKCIPHER_SEQ_LEN 6
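
/*
 * Worst case for the descriptor sequence built by cc_cipher_process()
 * below (XTS/ESSIV): MLLI bypass + key + two XEX state descriptors
 * (key2 and IV) + data flow + read-back of the next IV.
 */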

#define template_skcipher       template_u.skcipher

struct cc_user_key_info {
        u8 *key;
        dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
        enum cc_hw_crypto_key key1_slot;
        enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
        u8 slot;
        enum cc_cpp_alg alg;
};

enum cc_key_type {
        CC_UNPROTECTED_KEY,             /* User key */
        CC_HW_PROTECTED_KEY,            /* HW (FDE) key */
        CC_POLICY_PROTECTED_KEY,        /* CPP key */
        CC_INVALID_PROTECTED_KEY        /* Invalid key */
};

struct cc_cipher_ctx {
        struct cc_drvdata *drvdata;
        int keylen;
        int cipher_mode;
        int flow_mode;
        unsigned int flags;
        enum cc_key_type key_type;
        struct cc_user_key_info user;
        union {
                struct cc_hw_key_info hw;
                struct cc_cpp_key_info cpp;
        };
        struct crypto_shash *shash_tfm;
        struct crypto_skcipher *fallback_tfm;
        bool fallback_on;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

        return ctx_p->key_type;
}

static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                switch (size) {
                case CC_AES_128_BIT_KEY_SIZE:
                case CC_AES_192_BIT_KEY_SIZE:
                        if (ctx_p->cipher_mode != DRV_CIPHER_XTS)
                                return 0;
                        break;
                case CC_AES_256_BIT_KEY_SIZE:
                        return 0;
                case (CC_AES_192_BIT_KEY_SIZE * 2):
                case (CC_AES_256_BIT_KEY_SIZE * 2):
                        if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                            ctx_p->cipher_mode == DRV_CIPHER_ESSIV)
                                return 0;
                        break;
                default:
                        break;
                }
                break;
        case S_DIN_to_DES:
                if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
                        return 0;
                break;
        case S_DIN_to_SM4:
                if (size == SM4_KEY_SIZE)
                        return 0;
                break;
        default:
                break;
        }
        return -EINVAL;
}
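
/*
 * Illustrative sketch (not driver code): for AES the rules above mean
 * xts(aes) and essiv(cbc(aes),sha256) expect double-length keys while
 * the single-key modes take the usual sizes, e.g.:
 *
 *      struct cc_cipher_ctx xts = { .flow_mode = S_DIN_to_AES,
 *                                   .cipher_mode = DRV_CIPHER_XTS };
 *
 *      validate_keys_sizes(&xts, AES_KEYSIZE_128);     -> -EINVAL
 *      validate_keys_sizes(&xts, 2 * AES_KEYSIZE_128); -> 0 (key1 + key2)
 */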

static int validate_data_size(struct cc_cipher_ctx *ctx_p,
                              unsigned int size)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                switch (ctx_p->cipher_mode) {
                case DRV_CIPHER_XTS:
                case DRV_CIPHER_CBC_CTS:
                        if (size >= AES_BLOCK_SIZE)
                                return 0;
                        break;
                case DRV_CIPHER_OFB:
                case DRV_CIPHER_CTR:
                        return 0;
                case DRV_CIPHER_ECB:
                case DRV_CIPHER_CBC:
                case DRV_CIPHER_ESSIV:
                        if (IS_ALIGNED(size, AES_BLOCK_SIZE))
                                return 0;
                        break;
                default:
                        break;
                }
                break;
        case S_DIN_to_DES:
                if (IS_ALIGNED(size, DES_BLOCK_SIZE))
                        return 0;
                break;
        case S_DIN_to_SM4:
                switch (ctx_p->cipher_mode) {
                case DRV_CIPHER_CTR:
                        return 0;
                case DRV_CIPHER_ECB:
                case DRV_CIPHER_CBC:
                        if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
                                return 0;
                        break;
                default:
                        break;
                }
                break;
        default:
                break;
        }
        return -EINVAL;
}

static int cc_cipher_init(struct crypto_tfm *tfm)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct cc_crypto_alg *cc_alg =
                        container_of(tfm->__crt_alg, struct cc_crypto_alg,
                                     skcipher_alg.base);
        struct device *dev = drvdata_to_dev(cc_alg->drvdata);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
        unsigned int fallback_req_size = 0;

        dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
                crypto_tfm_alg_name(tfm));

        ctx_p->cipher_mode = cc_alg->cipher_mode;
        ctx_p->flow_mode = cc_alg->flow_mode;
        ctx_p->drvdata = cc_alg->drvdata;

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                const char *name = crypto_tfm_alg_name(tfm);

                /* Alloc hash tfm for essiv */
                ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0);
                if (IS_ERR(ctx_p->shash_tfm)) {
                        dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
                        return PTR_ERR(ctx_p->shash_tfm);
                }
                max_key_buf_size <<= 1;

                /* Alloc fallback tfm for essiv when key size != 256 bit */
                ctx_p->fallback_tfm =
                        crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);

                if (IS_ERR(ctx_p->fallback_tfm)) {
                        /* Note we're still allowing registration with no fallback since it's
                         * better to have most modes supported than none at all.
                         */
                        dev_warn(dev, "Error allocating fallback algo %s. Some modes may not be available.\n",
                               name);
                        ctx_p->fallback_tfm = NULL;
                } else {
                        fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm);
                }
        }

        crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
                                    sizeof(struct cipher_req_ctx) + fallback_req_size);

        /* Allocate key buffer, cache line aligned */
        ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL);
        if (!ctx_p->user.key)
                goto free_fallback;

        dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
                ctx_p->user.key);

        /* Map key buffer */
        ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
                                                  max_key_buf_size,
                                                  DMA_TO_DEVICE);
        if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
                dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
                        max_key_buf_size, ctx_p->user.key);
                goto free_key;
        }
        dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
                max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

        return 0;

free_key:
        kfree(ctx_p->user.key);
free_fallback:
        crypto_free_skcipher(ctx_p->fallback_tfm);
        crypto_free_shash(ctx_p->shash_tfm);

        return -ENOMEM;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
        struct crypto_alg *alg = tfm->__crt_alg;
        struct cc_crypto_alg *cc_alg =
                        container_of(alg, struct cc_crypto_alg,
                                     skcipher_alg.base);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);

        dev_dbg(dev, "Clearing context @%p for %s\n",
                crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                /* Free hash tfm for essiv */
                crypto_free_shash(ctx_p->shash_tfm);
                ctx_p->shash_tfm = NULL;
                crypto_free_skcipher(ctx_p->fallback_tfm);
                ctx_p->fallback_tfm = NULL;
        }

        /* Unmap key buffer */
        dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
                         DMA_TO_DEVICE);
        dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
                &ctx_p->user.key_dma_addr);

        /* Free key buffer in context */
        dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
        kfree_sensitive(ctx_p->user.key);
}

struct tdes_keys {
        u8      key1[DES_KEY_SIZE];
        u8      key2[DES_KEY_SIZE];
        u8      key3[DES_KEY_SIZE];
};

static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
        switch (slot_num) {
        case 0:
                return KFDE0_KEY;
        case 1:
                return KFDE1_KEY;
        case 2:
                return KFDE2_KEY;
        case 3:
                return KFDE3_KEY;
        }
        return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
        return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
        if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
                return CC_HW_PROTECTED_KEY;
        else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
                 slot_num <= CC_LAST_CPP_KEY_SLOT)
                return CC_POLICY_PROTECTED_KEY;
        else
                return CC_INVALID_PROTECTED_KEY;
}
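
/*
 * Slot number layout, per the cc_driver.h constants used above: HW
 * (FDE) slots [CC_FIRST_HW_KEY_SLOT..CC_LAST_HW_KEY_SLOT] map through
 * cc_slot_to_hw_key() to KFDEn_KEY, CPP slots
 * [CC_FIRST_CPP_KEY_SLOT..CC_LAST_CPP_KEY_SLOT] are rebased to a
 * zero-based index by cc_slot_to_cpp_key(), and any other value is
 * rejected as CC_INVALID_PROTECTED_KEY.
 */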

static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        struct cc_hkey_info hki;

        dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
                ctx_p, crypto_tfm_alg_name(tfm), keylen);
        dump_byte_array("key", key, keylen);

        /* STAT_PHASE_0: Init and sanity checks */

        /* This checks the size of the protected key token */
        if (keylen != sizeof(hki)) {
                dev_err(dev, "Unsupported protected key size %d.\n", keylen);
                return -EINVAL;
        }

        memcpy(&hki, key, keylen);

        /* The real key len for crypto op is the size of the HW key
         * referenced by the HW key slot, not the hardware key token
         */
        keylen = hki.keylen;

        if (validate_keys_sizes(ctx_p, keylen)) {
                dev_dbg(dev, "Unsupported key size %d.\n", keylen);
                return -EINVAL;
        }

        ctx_p->keylen = keylen;
        ctx_p->fallback_on = false;

        switch (cc_slot_to_key_type(hki.hw_key1)) {
        case CC_HW_PROTECTED_KEY:
                if (ctx_p->flow_mode == S_DIN_to_SM4) {
                        dev_err(dev, "Only AES HW protected keys are supported\n");
                        return -EINVAL;
                }

                ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
                if (ctx_p->hw.key1_slot == END_OF_KEYS) {
                        dev_err(dev, "Unsupported hw key1 number (%d)\n",
                                hki.hw_key1);
                        return -EINVAL;
                }

                if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                    ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                        if (hki.hw_key1 == hki.hw_key2) {
                                dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
                                        hki.hw_key1, hki.hw_key2);
                                return -EINVAL;
                        }

                        ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
                        if (ctx_p->hw.key2_slot == END_OF_KEYS) {
                                dev_err(dev, "Unsupported hw key2 number (%d)\n",
                                        hki.hw_key2);
                                return -EINVAL;
                        }
                }

                ctx_p->key_type = CC_HW_PROTECTED_KEY;
                dev_dbg(dev, "HW protected key %d/%d set.\n",
                        ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
                break;

        case CC_POLICY_PROTECTED_KEY:
                if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
                        dev_err(dev, "CPP keys not supported in this hardware revision.\n");
                        return -EINVAL;
                }

                if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
                    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
                        dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
                        return -EINVAL;
                }

                ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
                if (ctx_p->flow_mode == S_DIN_to_AES)
                        ctx_p->cpp.alg = CC_CPP_AES;
                else /* Must be SM4 due to sethkey registration */
                        ctx_p->cpp.alg = CC_CPP_SM4;
                ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
                dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
                        ctx_p->cpp.alg, ctx_p->cpp.slot);
                break;

        default:
                dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
                return -EINVAL;
        }

        return 0;
}
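
/*
 * Usage sketch, assuming HW key slots start at 0 (the values here are
 * made up for illustration): a caller selects a protected key by
 * passing a struct cc_hkey_info token as the "key" material:
 *
 *      struct cc_hkey_info hki = {
 *              .keylen  = AES_KEYSIZE_256,  // size of the referenced HW key
 *              .hw_key1 = 0,                // FDE slot 0 -> KFDE0_KEY
 *              .hw_key2 = 1,                // second slot, XTS/ESSIV only
 *      };
 *
 *      crypto_skcipher_setkey(tfm, (u8 *)&hki, sizeof(hki));
 */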

static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
                            unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        struct cc_crypto_alg *cc_alg =
                        container_of(tfm->__crt_alg, struct cc_crypto_alg,
                                     skcipher_alg.base);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

        dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
                ctx_p, crypto_tfm_alg_name(tfm), keylen);
        dump_byte_array("key", key, keylen);

        /* STAT_PHASE_0: Init and sanity checks */

        if (validate_keys_sizes(ctx_p, keylen)) {
                dev_dbg(dev, "Invalid key size %d.\n", keylen);
                return -EINVAL;
        }

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {

                /* We only support 256 bit ESSIV-CBC-AES keys */
                if (keylen != AES_KEYSIZE_256) {
                        unsigned int flags = crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_MASK;

                        if (likely(ctx_p->fallback_tfm)) {
                                ctx_p->fallback_on = true;
                                crypto_skcipher_clear_flags(ctx_p->fallback_tfm,
                                                            CRYPTO_TFM_REQ_MASK);
                                crypto_skcipher_set_flags(ctx_p->fallback_tfm, flags);
                                return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen);
                        }

                        dev_dbg(dev, "Unsupported key size %d and no fallback.\n", keylen);
                        return -EINVAL;
                }

                /* Internal ESSIV key buffer is double sized */
                max_key_buf_size <<= 1;
        }

        ctx_p->fallback_on = false;
        ctx_p->key_type = CC_UNPROTECTED_KEY;

        /*
         * Verify DES weak keys
         * Note that we're dropping the expanded key since the
         * HW does the expansion on its own.
         */
        if (ctx_p->flow_mode == S_DIN_to_DES) {
                if ((keylen == DES3_EDE_KEY_SIZE &&
                     verify_skcipher_des3_key(sktfm, key)) ||
                    verify_skcipher_des_key(sktfm, key)) {
                        dev_dbg(dev, "weak DES key\n");
                        return -EINVAL;
                }
        }

        if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
            xts_check_key(tfm, key, keylen)) {
                dev_dbg(dev, "weak XTS key\n");
                return -EINVAL;
        }

        /* STAT_PHASE_1: Copy key to ctx */
        dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
                                max_key_buf_size, DMA_TO_DEVICE);

        memcpy(ctx_p->user.key, key, keylen);

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                /* sha256 for key2 - use sw implementation */
                int err;

                err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
                                              ctx_p->user.key, keylen,
                                              ctx_p->user.key + keylen);
                if (err) {
                        dev_err(dev, "Failed to hash ESSIV key.\n");
                        return err;
                }

                keylen <<= 1;
        }
        dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
                                   max_key_buf_size, DMA_TO_DEVICE);
        ctx_p->keylen = keylen;

        dev_dbg(dev, "return safely\n");
        return 0;
}
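
/*
 * Resulting ESSIV key buffer layout for the 256-bit keys accepted
 * above: the user key fills the first half and its SHA-256 digest
 * (computed with the software shash) the second half, so the XEX key
 * setup finds key2 at keylen / 2:
 *
 *      ctx_p->user.key: [ user key (32 B) | sha256(user key) (32 B) ]
 */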

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                return S_AES_to_DOUT;
        case S_DIN_to_DES:
                return S_DES_to_DOUT;
        case S_DIN_to_SM4:
                return S_SM4_to_DOUT;
        default:
                return ctx_p->flow_mode;
        }
}

static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
                                 struct cipher_req_ctx *req_ctx,
                                 unsigned int ivsize, struct cc_hw_desc desc[],
                                 unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = cc_out_setup_mode(ctx_p);
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

        if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
                return;

        switch (cipher_mode) {
        case DRV_CIPHER_ECB:
                break;
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
                /* Read next IV */
                hw_desc_init(&desc[*seq_size]);
                set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
                set_cipher_config0(&desc[*seq_size], direction);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                if (cipher_mode == DRV_CIPHER_CTR ||
                    cipher_mode == DRV_CIPHER_OFB) {
                        set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
                } else {
                        set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
                }
                set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
                (*seq_size)++;
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
                /* IV */
                hw_desc_init(&desc[*seq_size]);
                set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
                              NS_BIT, 1);
                set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
                (*seq_size)++;
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

static void cc_setup_state_desc(struct crypto_tfm *tfm,
                                struct cipher_req_ctx *req_ctx,
                                unsigned int ivsize, unsigned int nbytes,
                                struct cc_hw_desc desc[],
                                unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = ctx_p->flow_mode;
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

        switch (cipher_mode) {
        case DRV_CIPHER_ECB:
                break;
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
                /* Load IV */
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
                             NS_BIT);
                set_cipher_config0(&desc[*seq_size], direction);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                if (cipher_mode == DRV_CIPHER_CTR ||
                    cipher_mode == DRV_CIPHER_OFB) {
                        set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
                } else {
                        set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
                }
                (*seq_size)++;
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
                                    struct cipher_req_ctx *req_ctx,
                                    unsigned int ivsize, unsigned int nbytes,
                                    struct cc_hw_desc desc[],
                                    unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = ctx_p->flow_mode;
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
        unsigned int key_len = (ctx_p->keylen / 2);
        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
        unsigned int key_offset = key_len;

        switch (cipher_mode) {
        case DRV_CIPHER_ECB:
                break;
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:

                if (cipher_mode == DRV_CIPHER_ESSIV)
                        key_len = SHA256_DIGEST_SIZE;

                /* load XEX key */
                hw_desc_init(&desc[*seq_size]);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                        set_hw_crypto_key(&desc[*seq_size],
                                          ctx_p->hw.key2_slot);
                } else {
                        set_din_type(&desc[*seq_size], DMA_DLLI,
                                     (key_dma_addr + key_offset),
                                     key_len, NS_BIT);
                }
                set_xex_data_unit_size(&desc[*seq_size], nbytes);
                set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
                set_key_size_aes(&desc[*seq_size], key_len);
                set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
                (*seq_size)++;

                /* Load IV */
                hw_desc_init(&desc[*seq_size]);
                set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                set_key_size_aes(&desc[*seq_size], key_len);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
                             CC_AES_BLOCK_SIZE, NS_BIT);
                (*seq_size)++;
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}
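
/*
 * Note on the key halves used above: for XTS, key2 (the tweak key) sits
 * in the second half of the key buffer at key_offset = keylen / 2; for
 * ESSIV that half holds the SHA-256 digest written by setkey(), which
 * is why key_len is forced to SHA256_DIGEST_SIZE.
 */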

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                return DIN_AES_DOUT;
        case S_DIN_to_DES:
                return DIN_DES_DOUT;
        case S_DIN_to_SM4:
                return DIN_SM4_DOUT;
        default:
                return ctx_p->flow_mode;
        }
}

static void cc_setup_key_desc(struct crypto_tfm *tfm,
                              struct cipher_req_ctx *req_ctx,
                              unsigned int nbytes, struct cc_hw_desc desc[],
                              unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = ctx_p->flow_mode;
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
        unsigned int key_len = ctx_p->keylen;
        unsigned int din_size;

        switch (cipher_mode) {
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
        case DRV_CIPHER_ECB:
                /* Load key */
                hw_desc_init(&desc[*seq_size]);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);

                if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
                        /* We use the AES key size coding for all CPP algs */
                        set_key_size_aes(&desc[*seq_size], key_len);
                        set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
                        flow_mode = cc_out_flow_mode(ctx_p);
                } else {
                        if (flow_mode == S_DIN_to_AES) {
                                if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                                        set_hw_crypto_key(&desc[*seq_size],
                                                          ctx_p->hw.key1_slot);
                                } else {
                                        /* CC_UNPROTECTED_KEY:
                                         * invalid key sizes are filtered
                                         * out in setkey()
                                         */
                                        din_size = (key_len == 24) ?
                                                AES_MAX_KEY_SIZE : key_len;

                                        set_din_type(&desc[*seq_size], DMA_DLLI,
                                                     key_dma_addr, din_size,
                                                     NS_BIT);
                                }
                                set_key_size_aes(&desc[*seq_size], key_len);
                        } else {
                                /* DES */
                                set_din_type(&desc[*seq_size], DMA_DLLI,
                                             key_dma_addr, key_len, NS_BIT);
                                set_key_size_des(&desc[*seq_size], key_len);
                        }
                        set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
                }
                set_flow_mode(&desc[*seq_size], flow_mode);
                (*seq_size)++;
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
                /* Load AES key */
                hw_desc_init(&desc[*seq_size]);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                        set_hw_crypto_key(&desc[*seq_size],
                                          ctx_p->hw.key1_slot);
                } else {
                        set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
                                     (key_len / 2), NS_BIT);
                }
                set_key_size_aes(&desc[*seq_size], (key_len / 2));
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
                (*seq_size)++;
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
                               struct cipher_req_ctx *req_ctx,
                               struct scatterlist *dst, struct scatterlist *src,
                               unsigned int nbytes, void *areq,
                               struct cc_hw_desc desc[], unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);

        if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
                /* bypass */
                dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
                        &req_ctx->mlli_params.mlli_dma_addr,
                        req_ctx->mlli_params.mlli_len,
                        ctx_p->drvdata->mlli_sram_addr);
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_DLLI,
                             req_ctx->mlli_params.mlli_dma_addr,
                             req_ctx->mlli_params.mlli_len, NS_BIT);
                set_dout_sram(&desc[*seq_size],
                              ctx_p->drvdata->mlli_sram_addr,
                              req_ctx->mlli_params.mlli_len);
                set_flow_mode(&desc[*seq_size], BYPASS);
                (*seq_size)++;
        }
}

static void cc_setup_flow_desc(struct crypto_tfm *tfm,
                               struct cipher_req_ctx *req_ctx,
                               struct scatterlist *dst, struct scatterlist *src,
                               unsigned int nbytes, struct cc_hw_desc desc[],
                               unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        unsigned int flow_mode = cc_out_flow_mode(ctx_p);
        bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
                          ctx_p->cipher_mode == DRV_CIPHER_ECB);

        /* Process */
        if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
                dev_dbg(dev, " data params addr %pad length 0x%X\n",
                        &sg_dma_address(src), nbytes);
                dev_dbg(dev, " data params addr %pad length 0x%X\n",
                        &sg_dma_address(dst), nbytes);
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
                             nbytes, NS_BIT);
                set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
                              nbytes, NS_BIT, (!last_desc ? 0 : 1));
                if (last_desc)
                        set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

                set_flow_mode(&desc[*seq_size], flow_mode);
                (*seq_size)++;
        } else {
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_MLLI,
                             ctx_p->drvdata->mlli_sram_addr,
                             req_ctx->in_mlli_nents, NS_BIT);
                if (req_ctx->out_nents == 0) {
                        dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
                                ctx_p->drvdata->mlli_sram_addr,
                                ctx_p->drvdata->mlli_sram_addr);
                        set_dout_mlli(&desc[*seq_size],
                                      ctx_p->drvdata->mlli_sram_addr,
                                      req_ctx->in_mlli_nents, NS_BIT,
                                      (!last_desc ? 0 : 1));
                } else {
                        dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
                                ctx_p->drvdata->mlli_sram_addr,
                                ctx_p->drvdata->mlli_sram_addr +
                                (u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
                        set_dout_mlli(&desc[*seq_size],
                                      (ctx_p->drvdata->mlli_sram_addr +
                                       (LLI_ENTRY_BYTE_SIZE *
                                        req_ctx->in_mlli_nents)),
                                      req_ctx->out_mlli_nents, NS_BIT,
                                      (!last_desc ? 0 : 1));
                }
                if (last_desc)
                        set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

                set_flow_mode(&desc[*seq_size], flow_mode);
                (*seq_size)++;
        }
}

static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
        struct skcipher_request *req = (struct skcipher_request *)cc_req;
        struct scatterlist *dst = req->dst;
        struct scatterlist *src = req->src;
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
        struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
        unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

        if (err != -EINPROGRESS) {
                /* Not a BACKLOG notification */
                cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
                memcpy(req->iv, req_ctx->iv, ivsize);
                kfree_sensitive(req_ctx->iv);
        }

        skcipher_request_complete(req, err);
}

static int cc_cipher_process(struct skcipher_request *req,
                             enum drv_crypto_direction direction)
{
        struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
        struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
        unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
        struct scatterlist *dst = req->dst;
        struct scatterlist *src = req->src;
        unsigned int nbytes = req->cryptlen;
        void *iv = req->iv;
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
        struct cc_crypto_req cc_req = {};
        int rc;
        unsigned int seq_len = 0;
        gfp_t flags = cc_gfp_flags(&req->base);

        dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
                ((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
                "Encrypt" : "Decrypt"), req, iv, nbytes);

        /* STAT_PHASE_0: Init and sanity checks */

        if (validate_data_size(ctx_p, nbytes)) {
                dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
                rc = -EINVAL;
                goto exit_process;
        }
        if (nbytes == 0) {
                /* No data to process is valid */
                rc = 0;
                goto exit_process;
        }

        if (ctx_p->fallback_on) {
                struct skcipher_request *subreq = skcipher_request_ctx(req);

                *subreq = *req;
                skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm);
                if (direction == DRV_CRYPTO_DIRECTION_ENCRYPT)
                        return crypto_skcipher_encrypt(subreq);
                else
                        return crypto_skcipher_decrypt(subreq);
        }

        /* The IV we are handed may be allocated from the stack so
         * we must copy it to a DMAable buffer before use.
         */
        req_ctx->iv = kmemdup(iv, ivsize, flags);
        if (!req_ctx->iv) {
                rc = -ENOMEM;
                goto exit_process;
        }

        /* Setup request structure */
        cc_req.user_cb = cc_cipher_complete;
        cc_req.user_arg = req;

        /* Setup CPP operation details */
        if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
                cc_req.cpp.is_cpp = true;
                cc_req.cpp.alg = ctx_p->cpp.alg;
                cc_req.cpp.slot = ctx_p->cpp.slot;
        }

        /* Setup request context */
        req_ctx->gen_ctx.op_type = direction;

        /* STAT_PHASE_1: Map buffers */

        rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
                                   req_ctx->iv, src, dst, flags);
        if (rc) {
                dev_err(dev, "map_request() failed\n");
                goto exit_process;
        }

        /* STAT_PHASE_2: Create sequence */

        /* Setup state (IV) */
        cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
        /* Setup MLLI line, if needed */
        cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
        /* Setup key */
        cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
        /* Setup state (IV and XEX key) */
        cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
        /* Data processing */
        cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
        /* Read next IV */
        cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

        /* STAT_PHASE_3: Lock HW and push sequence */

        rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
                             &req->base);
        if (rc != -EINPROGRESS && rc != -EBUSY) {
                /* Failed to send the request or request completed
                 * synchronously
                 */
                cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
        }

exit_process:
        if (rc != -EINPROGRESS && rc != -EBUSY)
                kfree_sensitive(req_ctx->iv);

        return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

        memset(req_ctx, 0, sizeof(*req_ctx));

        return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

        memset(req_ctx, 0, sizeof(*req_ctx));

        return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}
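
/*
 * Illustrative sketch of driving one of the skciphers registered below
 * through the generic kernel crypto API (standard calls; buffer names
 * are made up and error handling is omitted):
 *
 *      DECLARE_CRYPTO_WAIT(wait);
 *      struct crypto_skcipher *tfm;
 *      struct skcipher_request *req;
 *      struct scatterlist sg;
 *
 *      tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *      crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_256);
 *      req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *      sg_init_one(&sg, buf, AES_BLOCK_SIZE);
 *      skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *                                    CRYPTO_TFM_REQ_MAY_SLEEP,
 *                                    crypto_req_done, &wait);
 *      skcipher_request_set_crypt(req, &sg, &sg, AES_BLOCK_SIZE, iv);
 *      crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */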
1004
1005 /* Block cipher alg */
1006 static const struct cc_alg_template skcipher_algs[] = {
1007         {
1008                 .name = "xts(paes)",
1009                 .driver_name = "xts-paes-ccree",
1010                 .blocksize = 1,
1011                 .template_skcipher = {
1012                         .setkey = cc_cipher_sethkey,
1013                         .encrypt = cc_cipher_encrypt,
1014                         .decrypt = cc_cipher_decrypt,
1015                         .min_keysize = CC_HW_KEY_SIZE,
1016                         .max_keysize = CC_HW_KEY_SIZE,
1017                         .ivsize = AES_BLOCK_SIZE,
1018                         },
1019                 .cipher_mode = DRV_CIPHER_XTS,
1020                 .flow_mode = S_DIN_to_AES,
1021                 .min_hw_rev = CC_HW_REV_630,
1022                 .std_body = CC_STD_NIST,
1023                 .sec_func = true,
1024         },
1025         {
1026                 .name = "essiv(cbc(paes),sha256)",
1027                 .driver_name = "essiv-paes-ccree",
1028                 .blocksize = AES_BLOCK_SIZE,
1029                 .template_skcipher = {
1030                         .setkey = cc_cipher_sethkey,
1031                         .encrypt = cc_cipher_encrypt,
1032                         .decrypt = cc_cipher_decrypt,
1033                         .min_keysize = CC_HW_KEY_SIZE,
1034                         .max_keysize = CC_HW_KEY_SIZE,
1035                         .ivsize = AES_BLOCK_SIZE,
1036                         },
1037                 .cipher_mode = DRV_CIPHER_ESSIV,
1038                 .flow_mode = S_DIN_to_AES,
1039                 .min_hw_rev = CC_HW_REV_712,
1040                 .std_body = CC_STD_NIST,
1041                 .sec_func = true,
1042         },
1043         {
1044                 .name = "ecb(paes)",
1045                 .driver_name = "ecb-paes-ccree",
1046                 .blocksize = AES_BLOCK_SIZE,
1047                 .template_skcipher = {
1048                         .setkey = cc_cipher_sethkey,
1049                         .encrypt = cc_cipher_encrypt,
1050                         .decrypt = cc_cipher_decrypt,
1051                         .min_keysize = CC_HW_KEY_SIZE,
1052                         .max_keysize = CC_HW_KEY_SIZE,
1053                         .ivsize = 0,
1054                         },
1055                 .cipher_mode = DRV_CIPHER_ECB,
1056                 .flow_mode = S_DIN_to_AES,
1057                 .min_hw_rev = CC_HW_REV_712,
1058                 .std_body = CC_STD_NIST,
1059                 .sec_func = true,
1060         },
1061         {
1062                 .name = "cbc(paes)",
1063                 .driver_name = "cbc-paes-ccree",
1064                 .blocksize = AES_BLOCK_SIZE,
1065                 .template_skcipher = {
1066                         .setkey = cc_cipher_sethkey,
1067                         .encrypt = cc_cipher_encrypt,
1068                         .decrypt = cc_cipher_decrypt,
1069                         .min_keysize = CC_HW_KEY_SIZE,
1070                         .max_keysize = CC_HW_KEY_SIZE,
1071                         .ivsize = AES_BLOCK_SIZE,
1072                 },
1073                 .cipher_mode = DRV_CIPHER_CBC,
1074                 .flow_mode = S_DIN_to_AES,
1075                 .min_hw_rev = CC_HW_REV_712,
1076                 .std_body = CC_STD_NIST,
1077                 .sec_func = true,
1078         },
1079         {
1080                 .name = "ofb(paes)",
1081                 .driver_name = "ofb-paes-ccree",
1082                 .blocksize = AES_BLOCK_SIZE,
1083                 .template_skcipher = {
1084                         .setkey = cc_cipher_sethkey,
1085                         .encrypt = cc_cipher_encrypt,
1086                         .decrypt = cc_cipher_decrypt,
1087                         .min_keysize = CC_HW_KEY_SIZE,
1088                         .max_keysize = CC_HW_KEY_SIZE,
1089                         .ivsize = AES_BLOCK_SIZE,
1090                         },
1091                 .cipher_mode = DRV_CIPHER_OFB,
1092                 .flow_mode = S_DIN_to_AES,
1093                 .min_hw_rev = CC_HW_REV_712,
1094                 .std_body = CC_STD_NIST,
1095                 .sec_func = true,
1096         },
1097         {
1098                 .name = "cts(cbc(paes))",
1099                 .driver_name = "cts-cbc-paes-ccree",
1100                 .blocksize = AES_BLOCK_SIZE,
1101                 .template_skcipher = {
1102                         .setkey = cc_cipher_sethkey,
1103                         .encrypt = cc_cipher_encrypt,
1104                         .decrypt = cc_cipher_decrypt,
1105                         .min_keysize = CC_HW_KEY_SIZE,
1106                         .max_keysize = CC_HW_KEY_SIZE,
1107                         .ivsize = AES_BLOCK_SIZE,
1108                         },
1109                 .cipher_mode = DRV_CIPHER_CBC_CTS,
1110                 .flow_mode = S_DIN_to_AES,
1111                 .min_hw_rev = CC_HW_REV_712,
1112                 .std_body = CC_STD_NIST,
1113                 .sec_func = true,
1114         },
1115         {
1116                 .name = "ctr(paes)",
1117                 .driver_name = "ctr-paes-ccree",
1118                 .blocksize = 1,
1119                 .template_skcipher = {
1120                         .setkey = cc_cipher_sethkey,
1121                         .encrypt = cc_cipher_encrypt,
1122                         .decrypt = cc_cipher_decrypt,
1123                         .min_keysize = CC_HW_KEY_SIZE,
1124                         .max_keysize = CC_HW_KEY_SIZE,
1125                         .ivsize = AES_BLOCK_SIZE,
1126                         },
1127                 .cipher_mode = DRV_CIPHER_CTR,
1128                 .flow_mode = S_DIN_to_AES,
1129                 .min_hw_rev = CC_HW_REV_712,
1130                 .std_body = CC_STD_NIST,
1131                 .sec_func = true,
1132         },
1133         {
1134                 /* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
1135                  * for the reason why this differs from the generic
1136                  * implementation.
1137                  */
1138                 .name = "xts(aes)",
1139                 .driver_name = "xts-aes-ccree",
1140                 .blocksize = 1,
1141                 .template_skcipher = {
1142                         .setkey = cc_cipher_setkey,
1143                         .encrypt = cc_cipher_encrypt,
1144                         .decrypt = cc_cipher_decrypt,
1145                         .min_keysize = AES_MIN_KEY_SIZE * 2,
1146                         .max_keysize = AES_MAX_KEY_SIZE * 2,
1147                         .ivsize = AES_BLOCK_SIZE,
1148                         },
1149                 .cipher_mode = DRV_CIPHER_XTS,
1150                 .flow_mode = S_DIN_to_AES,
1151                 .min_hw_rev = CC_HW_REV_630,
1152                 .std_body = CC_STD_NIST,
1153         },
1154         {
1155                 .name = "essiv(cbc(aes),sha256)",
1156                 .driver_name = "essiv-aes-ccree",
1157                 .blocksize = AES_BLOCK_SIZE,
1158                 .template_skcipher = {
1159                         .setkey = cc_cipher_setkey,
1160                         .encrypt = cc_cipher_encrypt,
1161                         .decrypt = cc_cipher_decrypt,
1162                         .min_keysize = AES_MIN_KEY_SIZE,
1163                         .max_keysize = AES_MAX_KEY_SIZE,
1164                         .ivsize = AES_BLOCK_SIZE,
1165                         },
1166                 .cipher_mode = DRV_CIPHER_ESSIV,
1167                 .flow_mode = S_DIN_to_AES,
1168                 .min_hw_rev = CC_HW_REV_712,
1169                 .std_body = CC_STD_NIST,
1170         },
1171         {
1172                 .name = "ecb(aes)",
1173                 .driver_name = "ecb-aes-ccree",
1174                 .blocksize = AES_BLOCK_SIZE,
1175                 .template_skcipher = {
1176                         .setkey = cc_cipher_setkey,
1177                         .encrypt = cc_cipher_encrypt,
1178                         .decrypt = cc_cipher_decrypt,
1179                         .min_keysize = AES_MIN_KEY_SIZE,
1180                         .max_keysize = AES_MAX_KEY_SIZE,
1181                         .ivsize = 0,
1182                         },
1183                 .cipher_mode = DRV_CIPHER_ECB,
1184                 .flow_mode = S_DIN_to_AES,
1185                 .min_hw_rev = CC_HW_REV_630,
1186                 .std_body = CC_STD_NIST,
1187         },
1188         {
1189                 .name = "cbc(aes)",
1190                 .driver_name = "cbc-aes-ccree",
1191                 .blocksize = AES_BLOCK_SIZE,
1192                 .template_skcipher = {
1193                         .setkey = cc_cipher_setkey,
1194                         .encrypt = cc_cipher_encrypt,
1195                         .decrypt = cc_cipher_decrypt,
1196                         .min_keysize = AES_MIN_KEY_SIZE,
1197                         .max_keysize = AES_MAX_KEY_SIZE,
1198                         .ivsize = AES_BLOCK_SIZE,
1199                 },
1200                 .cipher_mode = DRV_CIPHER_CBC,
1201                 .flow_mode = S_DIN_to_AES,
1202                 .min_hw_rev = CC_HW_REV_630,
1203                 .std_body = CC_STD_NIST,
1204         },
1205         {
1206                 .name = "ofb(aes)",
1207                 .driver_name = "ofb-aes-ccree",
1208                 .blocksize = 1,
1209                 .template_skcipher = {
1210                         .setkey = cc_cipher_setkey,
1211                         .encrypt = cc_cipher_encrypt,
1212                         .decrypt = cc_cipher_decrypt,
1213                         .min_keysize = AES_MIN_KEY_SIZE,
1214                         .max_keysize = AES_MAX_KEY_SIZE,
1215                         .ivsize = AES_BLOCK_SIZE,
1216                         },
1217                 .cipher_mode = DRV_CIPHER_OFB,
1218                 .flow_mode = S_DIN_to_AES,
1219                 .min_hw_rev = CC_HW_REV_630,
1220                 .std_body = CC_STD_NIST,
1221         },
1222         {
1223                 .name = "cts(cbc(aes))",
1224                 .driver_name = "cts-cbc-aes-ccree",
1225                 .blocksize = AES_BLOCK_SIZE,
1226                 .template_skcipher = {
1227                         .setkey = cc_cipher_setkey,
1228                         .encrypt = cc_cipher_encrypt,
1229                         .decrypt = cc_cipher_decrypt,
1230                         .min_keysize = AES_MIN_KEY_SIZE,
1231                         .max_keysize = AES_MAX_KEY_SIZE,
1232                         .ivsize = AES_BLOCK_SIZE,
1233                         },
1234                 .cipher_mode = DRV_CIPHER_CBC_CTS,
1235                 .flow_mode = S_DIN_to_AES,
1236                 .min_hw_rev = CC_HW_REV_630,
1237                 .std_body = CC_STD_NIST,
1238         },
        {
                .name = "ctr(aes)",
                .driver_name = "ctr-aes-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CTR,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "cbc(des3_ede)",
                .driver_name = "cbc-3des-ccree",
                .blocksize = DES3_EDE_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = DES3_EDE_KEY_SIZE,
                        .max_keysize = DES3_EDE_KEY_SIZE,
                        .ivsize = DES3_EDE_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_DES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "ecb(des3_ede)",
                .driver_name = "ecb-3des-ccree",
                .blocksize = DES3_EDE_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = DES3_EDE_KEY_SIZE,
                        .max_keysize = DES3_EDE_KEY_SIZE,
                        .ivsize = 0,
                        },
                .cipher_mode = DRV_CIPHER_ECB,
                .flow_mode = S_DIN_to_DES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "cbc(des)",
                .driver_name = "cbc-des-ccree",
                .blocksize = DES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = DES_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_DES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "ecb(des)",
                .driver_name = "ecb-des-ccree",
                .blocksize = DES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = DES_KEY_SIZE,
                        .max_keysize = DES_KEY_SIZE,
                        .ivsize = 0,
                        },
                .cipher_mode = DRV_CIPHER_ECB,
                .flow_mode = S_DIN_to_DES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
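        /*
         * The SM4 entries below are gated on CC_HW_REV_713 and the OSCCA
         * standards body rather than NIST, matching the hardware revisions
         * that implement the cipher.
         */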
        {
                .name = "cbc(sm4)",
                .driver_name = "cbc-sm4-ccree",
                .blocksize = SM4_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = SM4_KEY_SIZE,
                        .max_keysize = SM4_KEY_SIZE,
                        .ivsize = SM4_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_SM4,
                .min_hw_rev = CC_HW_REV_713,
                .std_body = CC_STD_OSCCA,
        },
        {
                .name = "ecb(sm4)",
                .driver_name = "ecb-sm4-ccree",
                .blocksize = SM4_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = SM4_KEY_SIZE,
                        .max_keysize = SM4_KEY_SIZE,
                        .ivsize = 0,
                        },
                .cipher_mode = DRV_CIPHER_ECB,
                .flow_mode = S_DIN_to_SM4,
                .min_hw_rev = CC_HW_REV_713,
                .std_body = CC_STD_OSCCA,
        },
        {
                .name = "ctr(sm4)",
                .driver_name = "ctr-sm4-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = SM4_KEY_SIZE,
                        .max_keysize = SM4_KEY_SIZE,
                        .ivsize = SM4_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CTR,
                .flow_mode = S_DIN_to_SM4,
                .min_hw_rev = CC_HW_REV_713,
                .std_body = CC_STD_OSCCA,
        },
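        /*
         * Protected-key (psm4) variants: the "key" given to
         * cc_cipher_sethkey() is a CC_HW_KEY_SIZE token referencing a
         * hardware key slot rather than raw key material, and sec_func
         * makes cc_cipher_alloc() skip these entries when the part has
         * its security features disabled.
         */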
        {
                .name = "cbc(psm4)",
                .driver_name = "cbc-psm4-ccree",
                .blocksize = SM4_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = SM4_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_SM4,
                .min_hw_rev = CC_HW_REV_713,
                .std_body = CC_STD_OSCCA,
                .sec_func = true,
        },
        {
                .name = "ctr(psm4)",
                .driver_name = "ctr-psm4-ccree",
                .blocksize = SM4_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = SM4_BLOCK_SIZE,
                        },
                .cipher_mode = DRV_CIPHER_CTR,
                .flow_mode = S_DIN_to_SM4,
                .min_hw_rev = CC_HW_REV_713,
                .std_body = CC_STD_OSCCA,
                .sec_func = true,
        },
};
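
/*
 * Illustrative sketch only (not part of the driver): once registered,
 * the transforms above are reached through the generic skcipher API
 * rather than called directly, e.g.:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
 *
 * A request for "ctr(aes)" typically resolves to "ctr-aes-ccree" over
 * the generic software implementation because of the CC_CRA_PRIO
 * priority assigned in cc_create_alg() below.
 */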
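/*
 * cc_create_alg() - build a cc_crypto_alg from one of the templates above.
 * Copies the skcipher_alg template and fills in the base fields common to
 * all entries (names, module, priority, block size, context size, init/exit
 * hooks). The returned object is devm-allocated, so its lifetime is tied to
 * @dev and it needs no explicit free.
 */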
static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
                                           struct device *dev)
{
        struct cc_crypto_alg *t_alg;
        struct skcipher_alg *alg;

        t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
        if (!t_alg)
                return ERR_PTR(-ENOMEM);

        alg = &t_alg->skcipher_alg;

        memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

        snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
        snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
                 tmpl->driver_name);
        alg->base.cra_module = THIS_MODULE;
        alg->base.cra_priority = CC_CRA_PRIO;
        alg->base.cra_blocksize = tmpl->blocksize;
        alg->base.cra_alignmask = 0;
        alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

        alg->base.cra_init = cc_cipher_init;
        alg->base.cra_exit = cc_cipher_exit;
        alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

        t_alg->cipher_mode = tmpl->cipher_mode;
        t_alg->flow_mode = tmpl->flow_mode;

        return t_alg;
}
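/*
 * cc_cipher_free() - unregister every skcipher previously added to
 * @drvdata->alg_list and unlink it. The cc_crypto_alg wrappers themselves
 * are devm-allocated and are released with the device.
 */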
int cc_cipher_free(struct cc_drvdata *drvdata)
{
        struct cc_crypto_alg *t_alg, *n;

        /* Remove registered algs */
        list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
                crypto_unregister_skcipher(&t_alg->skcipher_alg);
                list_del(&t_alg->entry);
        }
        return 0;
}
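/*
 * cc_cipher_alloc() - register every template from skcipher_algs[] that
 * this hardware instance can serve. Entries needing a newer HW revision,
 * an unsupported standards body, or disabled security functions are
 * skipped; any failure unwinds all registrations done so far.
 */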
int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
        struct cc_crypto_alg *t_alg;
        struct device *dev = drvdata_to_dev(drvdata);
        int rc = -ENOMEM;
        int alg;

        INIT_LIST_HEAD(&drvdata->alg_list);

        /* Linux crypto */
        dev_dbg(dev, "Number of algorithms = %zu\n",
                ARRAY_SIZE(skcipher_algs));
        for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
                /*
                 * Skip templates this instance cannot serve: HW revision
                 * too old, standards body not supported, or a security
                 * function on a part with security features disabled.
                 */
                if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
                    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
                    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
                        continue;

                dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
                t_alg = cc_create_alg(&skcipher_algs[alg], dev);
                if (IS_ERR(t_alg)) {
                        rc = PTR_ERR(t_alg);
                        dev_err(dev, "%s alg allocation failed\n",
                                skcipher_algs[alg].driver_name);
                        goto fail0;
                }
                t_alg->drvdata = drvdata;

                dev_dbg(dev, "registering %s\n",
                        skcipher_algs[alg].driver_name);
                rc = crypto_register_skcipher(&t_alg->skcipher_alg);
                dev_dbg(dev, "%s alg registration rc = %x\n",
                        t_alg->skcipher_alg.base.cra_driver_name, rc);
                if (rc) {
                        dev_err(dev, "%s alg registration failed\n",
                                t_alg->skcipher_alg.base.cra_driver_name);
                        goto fail0;
                }

                list_add_tail(&t_alg->entry, &drvdata->alg_list);
                dev_dbg(dev, "Registered %s\n",
                        t_alg->skcipher_alg.base.cra_driver_name);
        }
        return 0;

fail0:
        /* Unwind: unregister everything registered so far */
        cc_cipher_free(drvdata);
        return rc;
}