GNU Linux-libre 4.9.311-gnu1
drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * Some ideas are from the marvell-cesa.c and s5p-sss.c drivers.
 */
#include "rk3288_crypto.h"

#define RK_CRYPTO_DEC                   BIT(0)

static void rk_crypto_complete(struct rk_crypto_info *dev, int err)
{
        if (dev->ablk_req->base.complete)
                dev->ablk_req->base.complete(&dev->ablk_req->base, err);
}

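/*
 * Validate and queue one ablkcipher request: the byte count must satisfy
 * the driver's alignment requirement (dev->align_size), the scatterlists
 * and byte totals are recorded in the device state, and the crypto
 * tasklet is kicked to start processing.
 */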
static int rk_handle_req(struct rk_crypto_info *dev,
                         struct ablkcipher_request *req)
{
        unsigned long flags;
        int err;

        if (!IS_ALIGNED(req->nbytes, dev->align_size))
                return -EINVAL;

        dev->left_bytes = req->nbytes;
        dev->total = req->nbytes;
        dev->sg_src = req->src;
        dev->first = req->src;
        dev->nents = sg_nents(req->src);
        dev->sg_dst = req->dst;
        dev->aligned = 1;
        dev->ablk_req = req;

        spin_lock_irqsave(&dev->lock, flags);
        err = ablkcipher_enqueue_request(&dev->queue, req);
        spin_unlock_irqrestore(&dev->lock, flags);
        tasklet_schedule(&dev->crypto_tasklet);
        return err;
}

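/*
 * AES setkey: only 128-, 192- and 256-bit keys are accepted; the key is
 * written straight into the AES key registers of the crypto block.
 */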
static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
                         const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256) {
                crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
        return 0;
}

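/*
 * DES/3DES setkey: single-DES keys are additionally rejected as weak when
 * the transform requests weak-key checking; the key is then copied into
 * the TDES key registers.
 */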
static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
                          const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 tmp[DES_EXPKEY_WORDS];

        if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
                crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        if (keylen == DES_KEY_SIZE) {
                if (!des_ekey(tmp, key) &&
                    (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                        tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
                        return -EINVAL;
                }
        }

        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
        return 0;
}

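/*
 * Per-mode entry points: each wrapper only selects the mode bits for the
 * hardware control register and hands the request to rk_handle_req().
 */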
static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_AES_ECB_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_AES_CBC_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = 0;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_SELECT;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
                    RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

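/*
 * Program the hardware for the current request: select FIFO mode, key/IV
 * byte swapping and the AES key size, load the IV for the chaining mode,
 * and enable the block-cipher DMA interrupts.
 */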
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
        struct crypto_ablkcipher *cipher =
                crypto_ablkcipher_reqtfm(dev->ablk_req);
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 ivsize, block, conf_reg = 0;

        block = crypto_tfm_alg_blocksize(tfm);
        ivsize = crypto_ablkcipher_ivsize(cipher);

        if (block == DES_BLOCK_SIZE) {
                dev->mode |= RK_CRYPTO_TDES_FIFO_MODE |
                             RK_CRYPTO_TDES_BYTESWAP_KEY |
                             RK_CRYPTO_TDES_BYTESWAP_IV;
                CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, dev->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0,
                            dev->ablk_req->info, ivsize);
                conf_reg = RK_CRYPTO_DESSEL;
        } else {
                dev->mode |= RK_CRYPTO_AES_FIFO_MODE |
                             RK_CRYPTO_AES_KEY_CHANGE |
                             RK_CRYPTO_AES_BYTESWAP_KEY |
                             RK_CRYPTO_AES_BYTESWAP_IV;
                if (ctx->keylen == AES_KEYSIZE_192)
                        dev->mode |= RK_CRYPTO_AES_192BIT_key;
                else if (ctx->keylen == AES_KEYSIZE_256)
                        dev->mode |= RK_CRYPTO_AES_256BIT_key;
                CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, dev->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0,
                            dev->ablk_req->info, ivsize);
        }
        conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
                    RK_CRYPTO_BYTESWAP_BRFIFO;
        CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
        CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
                     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}

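/*
 * Point the block-cipher DMA engine at the mapped source and destination
 * buffers (the length register counts 32-bit words) and start the block.
 */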
static void crypto_dma_start(struct rk_crypto_info *dev)
{
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
        CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
        CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
                     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

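/* Map the next chunk of scatterlist data and, on success, start the DMA. */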
static int rk_set_data_start(struct rk_crypto_info *dev)
{
        int err;

        err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
        if (!err)
                crypto_dma_start(dev);
        return err;
}

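/*
 * Called from the crypto tasklet for a new request: initialise the
 * hardware and launch the first DMA transfer under the device lock.
 */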
static int rk_ablk_start(struct rk_crypto_info *dev)
{
        unsigned long flags;
        int err;

        spin_lock_irqsave(&dev->lock, flags);
        rk_ablk_hw_init(dev);
        err = rk_set_data_start(dev);
        spin_unlock_irqrestore(&dev->lock, flags);
        return err;
}

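/*
 * Copy the final IV produced by the hardware back into the request so
 * that chained CBC calls see the expected output IV.
 */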
static void rk_iv_copyback(struct rk_crypto_info *dev)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(dev->ablk_req);
        u32 ivsize = crypto_ablkcipher_ivsize(tfm);

        if (ivsize == DES_BLOCK_SIZE)
                memcpy_fromio(dev->ablk_req->info,
                              dev->reg + RK_CRYPTO_TDES_IV_0, ivsize);
        else if (ivsize == AES_BLOCK_SIZE)
                memcpy_fromio(dev->ablk_req->info,
                              dev->reg + RK_CRYPTO_AES_IV_0, ivsize);
}

/*
 * Return:
 *      negative errno  an error occurred
 *      0               no error, processing continues
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
        int err = 0;

        dev->unload_data(dev);
        if (!dev->aligned) {
                if (!sg_pcopy_from_buffer(dev->ablk_req->dst, dev->nents,
                                          dev->addr_vir, dev->count,
                                          dev->total - dev->left_bytes -
                                          dev->count)) {
                        err = -EINVAL;
                        goto out_rx;
                }
        }
        if (dev->left_bytes) {
                if (dev->aligned) {
                        if (sg_is_last(dev->sg_src)) {
                                dev_err(dev->dev, "[%s:%d] Lack of data\n",
                                        __func__, __LINE__);
                                err = -ENOMEM;
                                goto out_rx;
                        }
                        dev->sg_src = sg_next(dev->sg_src);
                        dev->sg_dst = sg_next(dev->sg_dst);
                }
                err = rk_set_data_start(dev);
        } else {
                rk_iv_copyback(dev);
                /* the whole request has been processed without error */
                dev->complete(dev, 0);
        }
out_rx:
        return err;
}

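/*
 * Per-transform init: bind the transform to the crypto device, install
 * the start/update/complete callbacks, allocate the bounce page used for
 * unaligned scatterlists and enable the device clocks.
 */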
static int rk_ablk_cra_init(struct crypto_tfm *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_alg *alg = tfm->__crt_alg;
        struct rk_crypto_tmp *algt;

        algt = container_of(alg, struct rk_crypto_tmp, alg.crypto);

        ctx->dev = algt->dev;
        ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
        ctx->dev->start = rk_ablk_start;
        ctx->dev->update = rk_ablk_rx;
        ctx->dev->complete = rk_crypto_complete;
        ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);

        return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
}

static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        free_page((unsigned long)ctx->dev->addr_vir);
        ctx->dev->disable_clk(ctx->dev);
}

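/*
 * Algorithm templates exported to the core driver for registration:
 * asynchronous ECB/CBC implementations of AES, DES and 3DES backed by
 * the hardware engine.
 */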
struct rk_crypto_tmp rk_ecb_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "ecb(aes)",
                .cra_driver_name        = "ecb-aes-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x0f,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = rk_aes_setkey,
                        .encrypt        = rk_aes_ecb_encrypt,
                        .decrypt        = rk_aes_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "cbc(aes)",
                .cra_driver_name        = "cbc-aes-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x0f,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = rk_aes_setkey,
                        .encrypt        = rk_aes_cbc_encrypt,
                        .decrypt        = rk_aes_cbc_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_ecb_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "ecb(des)",
                .cra_driver_name        = "ecb-des-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES_KEY_SIZE,
                        .max_keysize    = DES_KEY_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des_ecb_encrypt,
                        .decrypt        = rk_des_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "cbc(des)",
                .cra_driver_name        = "cbc-des-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES_KEY_SIZE,
                        .max_keysize    = DES_KEY_SIZE,
                        .ivsize         = DES_BLOCK_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des_cbc_encrypt,
                        .decrypt        = rk_des_cbc_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "ecb(des3_ede)",
                .cra_driver_name        = "ecb-des3-ede-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES3_EDE_KEY_SIZE,
                        .max_keysize    = DES3_EDE_KEY_SIZE,
                        .ivsize         = DES_BLOCK_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des3_ede_ecb_encrypt,
                        .decrypt        = rk_des3_ede_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "cbc(des3_ede)",
                .cra_driver_name        = "cbc-des3-ede-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES3_EDE_KEY_SIZE,
                        .max_keysize    = DES3_EDE_KEY_SIZE,
                        .ivsize         = DES_BLOCK_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des3_ede_cbc_encrypt,
                        .decrypt        = rk_des3_ede_cbc_decrypt,
                }
        }
};