// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
 */
11 #include <linux/device.h>
12 #include "rk3288_crypto.h"
/* Flag OR-ed into ctx->mode to select decryption in the hardware CTRL word. */
#define RK_CRYPTO_DEC			BIT(0)

/*
 * Completion callback: forwards the final status of an async request to
 * the crypto API's completion handler.
 * NOTE(review): an interior line (likely "if (base->complete)") is not
 * visible in this chunk — confirm against upstream before editing.
 */
static void rk_crypto_complete(struct crypto_async_request *base, int err)
	base->complete(base, err);
/*
 * Validate and enqueue a skcipher request on the device queue.
 * The DMA engine requires cryptlen to be a multiple of dev->align_size.
 * NOTE(review): the early-return for the misaligned case (presumably
 * -EINVAL) is on a line not visible in this chunk.
 */
static int rk_handle_req(struct rk_crypto_info *dev,
			 struct skcipher_request *req)
	if (!IS_ALIGNED(req->cryptlen, dev->align_size))
	return dev->enqueue(dev, &req->base);
/*
 * Set the AES key: accept only 128/192/256-bit lengths, then program the
 * key bytes directly into the AES key registers via MMIO.
 * NOTE(review): the invalid-length return and the ctx->keylen bookkeeping
 * are on lines not visible in this chunk — confirm upstream.
 */
static int rk_aes_setkey(struct crypto_skcipher *cipher,
			 const u8 *key, unsigned int keylen)
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
/*
 * Set a single-DES key after running the crypto API's weak-key check,
 * then write it into the first TDES key register bank.
 * NOTE(review): the declaration of "err", the error check after
 * verify_skcipher_des_key(), and the final return are not visible here.
 */
static int rk_des_setkey(struct crypto_skcipher *cipher,
			 const u8 *key, unsigned int keylen)
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);

	err = verify_skcipher_des_key(cipher, key);

	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
/*
 * Set a triple-DES key after the API-level 3DES key validation, then
 * program all three subkeys starting at the TDES key registers.
 * NOTE(review): the declaration of "err", its check, and the final
 * return are on lines not visible in this chunk.
 */
static int rk_tdes_setkey(struct crypto_skcipher *cipher,
			  const u8 *key, unsigned int keylen)
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);

	err = verify_skcipher_des3_key(cipher, key);

	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
75 static int rk_aes_ecb_encrypt(struct skcipher_request *req)
77 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
78 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
79 struct rk_crypto_info *dev = ctx->dev;
81 ctx->mode = RK_CRYPTO_AES_ECB_MODE;
82 return rk_handle_req(dev, req);
85 static int rk_aes_ecb_decrypt(struct skcipher_request *req)
87 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
88 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
89 struct rk_crypto_info *dev = ctx->dev;
91 ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
92 return rk_handle_req(dev, req);
95 static int rk_aes_cbc_encrypt(struct skcipher_request *req)
97 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
98 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
99 struct rk_crypto_info *dev = ctx->dev;
101 ctx->mode = RK_CRYPTO_AES_CBC_MODE;
102 return rk_handle_req(dev, req);
105 static int rk_aes_cbc_decrypt(struct skcipher_request *req)
107 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
108 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
109 struct rk_crypto_info *dev = ctx->dev;
111 ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
112 return rk_handle_req(dev, req);
/*
 * Queue a DES-ECB encryption request on the crypto engine.
 * NOTE(review): the ctx->mode assignment (presumably 0 — plain ECB
 * encrypt uses no mode flags) is on a line not visible in this chunk.
 */
static int rk_des_ecb_encrypt(struct skcipher_request *req)
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	return rk_handle_req(dev, req);
125 static int rk_des_ecb_decrypt(struct skcipher_request *req)
127 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
128 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
129 struct rk_crypto_info *dev = ctx->dev;
131 ctx->mode = RK_CRYPTO_DEC;
132 return rk_handle_req(dev, req);
135 static int rk_des_cbc_encrypt(struct skcipher_request *req)
137 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
138 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
139 struct rk_crypto_info *dev = ctx->dev;
141 ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
142 return rk_handle_req(dev, req);
145 static int rk_des_cbc_decrypt(struct skcipher_request *req)
147 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
148 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
149 struct rk_crypto_info *dev = ctx->dev;
151 ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
152 return rk_handle_req(dev, req);
155 static int rk_des3_ede_ecb_encrypt(struct skcipher_request *req)
157 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
158 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
159 struct rk_crypto_info *dev = ctx->dev;
161 ctx->mode = RK_CRYPTO_TDES_SELECT;
162 return rk_handle_req(dev, req);
165 static int rk_des3_ede_ecb_decrypt(struct skcipher_request *req)
167 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
168 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
169 struct rk_crypto_info *dev = ctx->dev;
171 ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
172 return rk_handle_req(dev, req);
175 static int rk_des3_ede_cbc_encrypt(struct skcipher_request *req)
177 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
178 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
179 struct rk_crypto_info *dev = ctx->dev;
181 ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
182 return rk_handle_req(dev, req);
/*
 * Queue a 3DES-CBC decryption request on the crypto engine.
 * NOTE(review): the mode assignment continues on a line not visible in
 * this chunk (presumably "RK_CRYPTO_DEC;") — confirm upstream.
 */
static int rk_des3_ede_cbc_decrypt(struct skcipher_request *req)
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
	return rk_handle_req(dev, req);
/*
 * Program the cipher hardware for the current request: select the TDES
 * or AES register bank based on the transform's block size, set the
 * FIFO/byteswap mode bits, load the IV over MMIO, and enable the DMA
 * done/error interrupts.
 * NOTE(review): the "} else {" header of the AES branch and closing
 * braces are on lines not visible in this chunk.
 */
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	u32 ivsize, block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);
	ivsize = crypto_skcipher_ivsize(cipher);

	/* DES/3DES path: 8-byte block size selects the TDES register bank */
	if (block == DES_BLOCK_SIZE) {
		ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			     RK_CRYPTO_TDES_BYTESWAP_KEY |
			     RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->iv, ivsize);
		conf_reg = RK_CRYPTO_DESSEL;
	/* AES path: add key-size bits to the mode before writing CTRL */
		ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
			     RK_CRYPTO_AES_KEY_CHANGE |
			     RK_CRYPTO_AES_BYTESWAP_KEY |
			     RK_CRYPTO_AES_BYTESWAP_IV;
		if (ctx->keylen == AES_KEYSIZE_192)
			ctx->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			ctx->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->iv, ivsize);

	/* both paths byteswap the DMA FIFOs */
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
234 static void crypto_dma_start(struct rk_crypto_info *dev)
236 CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
237 CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
238 CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
239 CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
240 _SBF(RK_CRYPTO_BLOCK_START, 16));
/*
 * Prepare the current scatterlist segment for DMA and start the
 * transfer.  For chained decryption, the last ciphertext block of the
 * source segment must be saved before the hardware consumes it, since
 * it becomes the IV of the next chunk.
 * NOTE(review): the "err" declaration, the success check before
 * crypto_dma_start(), and the return are on lines not visible here.
 */
static int rk_set_data_start(struct rk_crypto_info *dev)
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 ivsize = crypto_skcipher_ivsize(tfm);
	u8 *src_last_blk = page_address(sg_page(dev->sg_src)) +
		dev->sg_src->offset + dev->sg_src->length - ivsize;

	/*
	 * Store the IV that needs to be updated in chain mode, and update
	 * the IV buffer to contain the next IV for decryption mode.
	 */
	if (ctx->mode & RK_CRYPTO_DEC) {
		memcpy(ctx->iv, src_last_blk, ivsize);
		sg_pcopy_to_buffer(dev->first, dev->src_nents, req->iv,
				   ivsize, dev->total - ivsize);

	err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
		crypto_dma_start(dev);
/*
 * Begin processing a newly dequeued skcipher request: snapshot the
 * request's scatterlists and byte counts into the device state, then
 * program the hardware and kick off the first DMA chunk under the
 * device lock (serialises against the interrupt path).
 * NOTE(review): the declarations of "err"/"flags" and the final return
 * are on lines not visible in this chunk.
 */
static int rk_ablk_start(struct rk_crypto_info *dev)
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);

	dev->left_bytes = req->cryptlen;
	dev->total = req->cryptlen;
	dev->sg_src = req->src;
	dev->first = req->src;
	dev->src_nents = sg_nents(req->src);
	dev->sg_dst = req->dst;
	dev->dst_nents = sg_nents(req->dst);

	spin_lock_irqsave(&dev->lock, flags);
	rk_ablk_hw_init(dev);
	err = rk_set_data_start(dev);
	spin_unlock_irqrestore(&dev->lock, flags);
/*
 * After an encryption finishes, copy the last ciphertext block back
 * into req->iv so a chained follow-up request can use it as its IV.
 * NOTE(review): the two memcpy() calls are alternatives of an if/else
 * (in-place scatterlist vs. bounce buffer) whose branch lines are not
 * visible in this chunk — confirm upstream.
 */
static void rk_iv_copyback(struct rk_crypto_info *dev)
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 ivsize = crypto_skcipher_ivsize(tfm);

	/* Update the IV buffer to contain the next IV for encryption mode. */
	if (!(ctx->mode & RK_CRYPTO_DEC)) {
		memcpy(req->iv, sg_virt(dev->sg_dst) +
		       dev->sg_dst->length - ivsize, ivsize);
		memcpy(req->iv, dev->addr_vir +
		       dev->count - ivsize, ivsize);
/*
 * Reprogram the hardware IV register between DMA chunks of a chained
 * (CBC) operation.  For encryption the next IV is the last ciphertext
 * block just written to the destination scatterlist; for decryption it
 * is the ciphertext block saved earlier (the decrypt-branch assignment
 * is on a line not visible in this chunk — NOTE(review): confirm it
 * reads ctx->iv).
 */
static void rk_update_iv(struct rk_crypto_info *dev)
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 ivsize = crypto_skcipher_ivsize(tfm);

	if (ctx->mode & RK_CRYPTO_DEC) {
		new_iv = page_address(sg_page(dev->sg_dst)) +
			 dev->sg_dst->offset + dev->sg_dst->length - ivsize;

	/* 8-byte IV targets the TDES bank, 16-byte IV the AES bank */
	if (ivsize == DES_BLOCK_SIZE)
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, new_iv, ivsize);
	else if (ivsize == AES_BLOCK_SIZE)
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, new_iv, ivsize);
/*
 * Per-chunk DMA completion handler.
 *
 * Returns:
 *   true  - some error occurred
 *   false - no error, continue
 *
 * Unloads the finished DMA data, copies results from the bounce buffer
 * back into the destination scatterlist where one was used, then either
 * starts the next segment or completes the whole request.
 * NOTE(review): several interior lines (the copy-failure branch, the
 * "Lack of data" error arguments, braces, and the final return) are not
 * visible in this chunk.
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);

	dev->unload_data(dev);
	if (!sg_pcopy_from_buffer(req->dst, dev->dst_nents,
				  dev->addr_vir, dev->count,
				  dev->total - dev->left_bytes -
	if (dev->left_bytes) {
		/* more data pending: advance both scatterlists */
		if (sg_is_last(dev->sg_src)) {
			dev_err(dev->dev, "[%s:%d] Lack of data\n",
		dev->sg_src = sg_next(dev->sg_src);
		dev->sg_dst = sg_next(dev->sg_dst);
		err = rk_set_data_start(dev);
	/* here the calculation is over without any error */
	dev->complete(dev->async_req, 0);
	tasklet_schedule(&dev->queue_task);
377 static int rk_ablk_init_tfm(struct crypto_skcipher *tfm)
379 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
380 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
381 struct rk_crypto_tmp *algt;
383 algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);
385 ctx->dev = algt->dev;
386 ctx->dev->align_size = crypto_tfm_alg_alignmask(crypto_skcipher_tfm(tfm)) + 1;
387 ctx->dev->start = rk_ablk_start;
388 ctx->dev->update = rk_ablk_rx;
389 ctx->dev->complete = rk_crypto_complete;
390 ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);
392 return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
395 static void rk_ablk_exit_tfm(struct crypto_skcipher *tfm)
397 struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
399 free_page((unsigned long)ctx->dev->addr_vir);
400 ctx->dev->disable_clk(ctx->dev);
/*
 * ecb(aes) registration: async-only hardware cipher; 0x0f alignmask
 * matches the engine's 16-byte DMA alignment requirement.
 * NOTE(review): the ".alg.skcipher = {" wrapper and closing braces are
 * on lines not visible in this chunk.
 */
struct rk_crypto_tmp rk_ecb_aes_alg = {
	.type = ALG_TYPE_CIPHER,
	.base.cra_name = "ecb(aes)",
	.base.cra_driver_name = "ecb-aes-rk",
	.base.cra_priority = 300,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
	.base.cra_alignmask = 0x0f,
	.base.cra_module = THIS_MODULE,
	.init = rk_ablk_init_tfm,
	.exit = rk_ablk_exit_tfm,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.setkey = rk_aes_setkey,
	.encrypt = rk_aes_ecb_encrypt,
	.decrypt = rk_aes_ecb_decrypt,
/* cbc(aes) registration: same shape as ecb(aes) plus a 16-byte IV. */
struct rk_crypto_tmp rk_cbc_aes_alg = {
	.type = ALG_TYPE_CIPHER,
	.base.cra_name = "cbc(aes)",
	.base.cra_driver_name = "cbc-aes-rk",
	.base.cra_priority = 300,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
	.base.cra_alignmask = 0x0f,
	.base.cra_module = THIS_MODULE,
	.init = rk_ablk_init_tfm,
	.exit = rk_ablk_exit_tfm,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = rk_aes_setkey,
	.encrypt = rk_aes_cbc_encrypt,
	.decrypt = rk_aes_cbc_decrypt,
/* ecb(des) registration: 8-byte blocks, 0x07 alignmask for the DMA engine. */
struct rk_crypto_tmp rk_ecb_des_alg = {
	.type = ALG_TYPE_CIPHER,
	.base.cra_name = "ecb(des)",
	.base.cra_driver_name = "ecb-des-rk",
	.base.cra_priority = 300,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = DES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
	.base.cra_alignmask = 0x07,
	.base.cra_module = THIS_MODULE,
	.init = rk_ablk_init_tfm,
	.exit = rk_ablk_exit_tfm,
	.min_keysize = DES_KEY_SIZE,
	.max_keysize = DES_KEY_SIZE,
	.setkey = rk_des_setkey,
	.encrypt = rk_des_ecb_encrypt,
	.decrypt = rk_des_ecb_decrypt,
/* cbc(des) registration: as ecb(des) plus an 8-byte IV. */
struct rk_crypto_tmp rk_cbc_des_alg = {
	.type = ALG_TYPE_CIPHER,
	.base.cra_name = "cbc(des)",
	.base.cra_driver_name = "cbc-des-rk",
	.base.cra_priority = 300,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = DES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
	.base.cra_alignmask = 0x07,
	.base.cra_module = THIS_MODULE,
	.init = rk_ablk_init_tfm,
	.exit = rk_ablk_exit_tfm,
	.min_keysize = DES_KEY_SIZE,
	.max_keysize = DES_KEY_SIZE,
	.ivsize = DES_BLOCK_SIZE,
	.setkey = rk_des_setkey,
	.encrypt = rk_des_cbc_encrypt,
	.decrypt = rk_des_cbc_decrypt,
/* ecb(des3_ede) registration: 3DES keys, DES-sized blocks. */
struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
	.type = ALG_TYPE_CIPHER,
	.base.cra_name = "ecb(des3_ede)",
	.base.cra_driver_name = "ecb-des3-ede-rk",
	.base.cra_priority = 300,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = DES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
	.base.cra_alignmask = 0x07,
	.base.cra_module = THIS_MODULE,
	.init = rk_ablk_init_tfm,
	.exit = rk_ablk_exit_tfm,
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.setkey = rk_tdes_setkey,
	.encrypt = rk_des3_ede_ecb_encrypt,
	.decrypt = rk_des3_ede_ecb_decrypt,
/* cbc(des3_ede) registration: as ecb(des3_ede) plus an 8-byte IV. */
struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
	.type = ALG_TYPE_CIPHER,
	.base.cra_name = "cbc(des3_ede)",
	.base.cra_driver_name = "cbc-des3-ede-rk",
	.base.cra_priority = 300,
	.base.cra_flags = CRYPTO_ALG_ASYNC,
	.base.cra_blocksize = DES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
	.base.cra_alignmask = 0x07,
	.base.cra_module = THIS_MODULE,
	.init = rk_ablk_init_tfm,
	.exit = rk_ablk_exit_tfm,
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES_BLOCK_SIZE,
	.setkey = rk_tdes_setkey,
	.encrypt = rk_des3_ede_cbc_encrypt,
	.decrypt = rk_des3_ede_cbc_decrypt,