1 /* SPDX-License-Identifier: GPL-2.0-or-later */
/*
3 * SM4 Cipher Algorithm, using ARMv8 Crypto Extensions
5 * https://tools.ietf.org/id/draft-ribose-cfrg-sm4-10.html
7 * Copyright (C) 2022, Alibaba Group.
8 * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
*/
11 #include <linux/module.h>
12 #include <linux/crypto.h>
13 #include <linux/kernel.h>
14 #include <linux/cpufeature.h>
17 #include <crypto/b128ops.h>
18 #include <crypto/internal/simd.h>
19 #include <crypto/internal/skcipher.h>
20 #include <crypto/internal/hash.h>
21 #include <crypto/scatterwalk.h>
22 #include <crypto/xts.h>
23 #include <crypto/sm4.h>
/* Convert a byte count to a whole number of 16-byte SM4 blocks (truncating). */
25 #define BYTES2BLKS(nbytes) ((nbytes) >> 4)
/*
 * Hand-written assembly primitives built on the ARMv8 SM4 Crypto Extension
 * instructions.  All take expanded round keys (u32 words) plus virtual
 * src/dst pointers.  Note the parameter naming: "nblocks"/"nblks" routines
 * take a count of 16-byte blocks, while the CTS and XTS routines take a
 * byte count ("nbytes") so they can handle a partial trailing block.
 * NOTE(review): this excerpt is elided; e.g. the sm4_ce_crypt() prototype
 * below is visibly missing its continuation line.
 */
27 asmlinkage void sm4_ce_expand_key(const u8 *key, u32 *rkey_enc, u32 *rkey_dec,
28 const u32 *fk, const u32 *ck);
29 asmlinkage void sm4_ce_crypt_block(const u32 *rkey, u8 *dst, const u8 *src);
30 asmlinkage void sm4_ce_crypt(const u32 *rkey, u8 *dst, const u8 *src,
32 asmlinkage void sm4_ce_cbc_enc(const u32 *rkey, u8 *dst, const u8 *src,
33 u8 *iv, unsigned int nblocks);
34 asmlinkage void sm4_ce_cbc_dec(const u32 *rkey, u8 *dst, const u8 *src,
35 u8 *iv, unsigned int nblocks);
36 asmlinkage void sm4_ce_cbc_cts_enc(const u32 *rkey, u8 *dst, const u8 *src,
37 u8 *iv, unsigned int nbytes);
38 asmlinkage void sm4_ce_cbc_cts_dec(const u32 *rkey, u8 *dst, const u8 *src,
39 u8 *iv, unsigned int nbytes);
40 asmlinkage void sm4_ce_cfb_enc(const u32 *rkey, u8 *dst, const u8 *src,
41 u8 *iv, unsigned int nblks);
42 asmlinkage void sm4_ce_cfb_dec(const u32 *rkey, u8 *dst, const u8 *src,
43 u8 *iv, unsigned int nblks);
44 asmlinkage void sm4_ce_ctr_enc(const u32 *rkey, u8 *dst, const u8 *src,
45 u8 *iv, unsigned int nblks);
/* XTS routines take both keys: key1 round keys as rkey1, key2 enc keys last. */
46 asmlinkage void sm4_ce_xts_enc(const u32 *rkey1, u8 *dst, const u8 *src,
47 u8 *tweak, unsigned int nbytes,
48 const u32 *rkey2_enc);
49 asmlinkage void sm4_ce_xts_dec(const u32 *rkey1, u8 *dst, const u8 *src,
50 u8 *tweak, unsigned int nbytes,
51 const u32 *rkey2_enc);
/*
 * MAC helper: XOR-then-encrypt chaining over full blocks; enc_before forces
 * an extra encryption of the running digest first, enc_after one at the end.
 */
52 asmlinkage void sm4_ce_mac_update(const u32 *rkey_enc, u8 *digest,
53 const u8 *src, unsigned int nblocks,
54 bool enc_before, bool enc_after);
/* Exported so other SM4 kernel modules can reuse these CE primitives. */
56 EXPORT_SYMBOL(sm4_ce_expand_key);
57 EXPORT_SYMBOL(sm4_ce_crypt_block);
58 EXPORT_SYMBOL(sm4_ce_cbc_enc);
59 EXPORT_SYMBOL(sm4_ce_cfb_enc);
/*
 * Per-tfm context shared by the MAC algorithms (cmac/xcbc/cbcmac).
 * NOTE(review): struct members are partially elided in this excerpt;
 * a key field (struct sm4_ctx) presumably precedes consts[] — confirm.
 */
66 struct sm4_mac_tfm_ctx {
/* Trailing derived constants: two blocks for cmac (k1/k2) and xcbc (k2/k3). */
68 u8 __aligned(8) consts[];
/* Per-request MAC state: running digest plus (elided) buffered length. */
71 struct sm4_mac_desc_ctx {
73 u8 digest[SM4_BLOCK_SIZE];
/*
 * Set the key for the single-key skcipher modes (ECB/CBC/CFB/CTR/CTS).
 * Expands @key into both encryption and decryption round keys via the
 * CE instruction.  Rejects any length other than SM4_KEY_SIZE (the
 * error-return line is elided in this excerpt).
 */
76 static int sm4_setkey(struct crypto_skcipher *tfm, const u8 *key,
79 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
81 if (key_len != SM4_KEY_SIZE)
85 sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
86 crypto_sm4_fk, crypto_sm4_ck);
/*
 * Set the double-length XTS key: first half becomes key1 (data key),
 * second half key2 (tweak key).  xts_verify_key() enforces the XTS
 * weak-key rules (e.g. FIPS equal-halves check) before expansion.
 */
91 static int sm4_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
94 struct sm4_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
97 if (key_len != SM4_KEY_SIZE * 2)
100 ret = xts_verify_key(tfm, key, key_len);
105 sm4_ce_expand_key(key, ctx->key1.rkey_enc,
106 ctx->key1.rkey_dec, crypto_sm4_fk, crypto_sm4_ck);
107 sm4_ce_expand_key(&key[SM4_KEY_SIZE], ctx->key2.rkey_enc,
108 ctx->key2.rkey_dec, crypto_sm4_fk, crypto_sm4_ck);
/*
 * Common ECB worker: walk the request's scatterlists and process each
 * contiguous run of full blocks with sm4_ce_crypt().  ECB enc vs. dec
 * differs only in which round-key schedule (@rkey) is passed in.
 */
114 static int sm4_ecb_do_crypt(struct skcipher_request *req, const u32 *rkey)
116 struct skcipher_walk walk;
120 err = skcipher_walk_virt(&walk, req, false);
122 while ((nbytes = walk.nbytes) > 0) {
123 const u8 *src = walk.src.virt.addr;
124 u8 *dst = walk.dst.virt.addr;
/* Whole blocks only; leftover bytes are handed back to the walker. */
129 nblks = BYTES2BLKS(nbytes);
131 sm4_ce_crypt(rkey, dst, src, nblks);
132 nbytes -= nblks * SM4_BLOCK_SIZE;
137 err = skcipher_walk_done(&walk, nbytes);
/* ECB encryption: use the encryption round keys. */
143 static int sm4_ecb_encrypt(struct skcipher_request *req)
145 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
146 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
148 return sm4_ecb_do_crypt(req, ctx->rkey_enc);
/* ECB decryption: same walk, decryption round keys. */
151 static int sm4_ecb_decrypt(struct skcipher_request *req)
153 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
154 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
156 return sm4_ecb_do_crypt(req, ctx->rkey_dec);
/*
 * Common CBC worker for encryption and decryption.  The asm routines
 * chain through walk.iv, so the IV is updated in place across walk
 * iterations.  Partial trailing bytes (nbytes % SM4_BLOCK_SIZE) are
 * returned to the walker unprocessed.
 */
159 static int sm4_cbc_crypt(struct skcipher_request *req,
160 struct sm4_ctx *ctx, bool encrypt)
162 struct skcipher_walk walk;
166 err = skcipher_walk_virt(&walk, req, false);
170 while ((nbytes = walk.nbytes) > 0) {
171 const u8 *src = walk.src.virt.addr;
172 u8 *dst = walk.dst.virt.addr;
173 unsigned int nblocks;
175 nblocks = nbytes / SM4_BLOCK_SIZE;
/* Select direction; IV/nblocks argument lines are elided here. */
180 sm4_ce_cbc_enc(ctx->rkey_enc, dst, src,
183 sm4_ce_cbc_dec(ctx->rkey_dec, dst, src,
189 err = skcipher_walk_done(&walk, nbytes % SM4_BLOCK_SIZE);
/* CBC encryption entry point. */
195 static int sm4_cbc_encrypt(struct skcipher_request *req)
197 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
198 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
200 return sm4_cbc_crypt(req, ctx, true);
/* CBC decryption entry point. */
203 static int sm4_cbc_decrypt(struct skcipher_request *req)
205 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
206 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
208 return sm4_cbc_crypt(req, ctx, false);
/*
 * CBC with ciphertext stealing (CTS): all but the last two blocks are
 * processed as plain CBC via a subrequest, then the final full block
 * plus the partial tail are handled by the dedicated CTS asm routines.
 * Requests shorter than one block are rejected; exactly one block
 * degenerates to plain CBC.
 */
211 static int sm4_cbc_cts_crypt(struct skcipher_request *req, bool encrypt)
213 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
214 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
215 struct scatterlist *src = req->src;
216 struct scatterlist *dst = req->dst;
217 struct scatterlist sg_src[2], sg_dst[2];
218 struct skcipher_request subreq;
219 struct skcipher_walk walk;
223 if (req->cryptlen < SM4_BLOCK_SIZE)
226 if (req->cryptlen == SM4_BLOCK_SIZE)
227 return sm4_cbc_crypt(req, ctx, encrypt);
229 skcipher_request_set_tfm(&subreq, tfm);
230 skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
233 /* handle the CBC cryption part */
/* Everything except the last two (possibly partial) blocks is plain CBC. */
234 cbc_blocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2;
236 skcipher_request_set_crypt(&subreq, src, dst,
237 cbc_blocks * SM4_BLOCK_SIZE,
240 err = sm4_cbc_crypt(&subreq, ctx, encrypt);
/* Fast-forward the scatterlists past the CBC-processed prefix. */
244 dst = src = scatterwalk_ffwd(sg_src, src, subreq.cryptlen);
245 if (req->dst != req->src)
246 dst = scatterwalk_ffwd(sg_dst, req->dst,
250 /* handle ciphertext stealing */
251 skcipher_request_set_crypt(&subreq, src, dst,
252 req->cryptlen - cbc_blocks * SM4_BLOCK_SIZE,
255 err = skcipher_walk_virt(&walk, &subreq, false);
262 sm4_ce_cbc_cts_enc(ctx->rkey_enc, walk.dst.virt.addr,
263 walk.src.virt.addr, walk.iv, walk.nbytes);
265 sm4_ce_cbc_cts_dec(ctx->rkey_dec, walk.dst.virt.addr,
266 walk.src.virt.addr, walk.iv, walk.nbytes);
/* Tail fully consumed by the asm routine; report zero bytes left. */
270 return skcipher_walk_done(&walk, 0);
/* CTS-CBC encryption entry point. */
273 static int sm4_cbc_cts_encrypt(struct skcipher_request *req)
275 return sm4_cbc_cts_crypt(req, true);
/* CTS-CBC decryption entry point. */
278 static int sm4_cbc_cts_decrypt(struct skcipher_request *req)
280 return sm4_cbc_cts_crypt(req, false);
/*
 * CFB encryption.  Full blocks go through the asm routine (which chains
 * the IV); a partial final block is handled here by encrypting the IV
 * into a keystream block and XORing it over the remaining bytes — only
 * valid on the last walk step (walk.nbytes == walk.total).
 */
283 static int sm4_cfb_encrypt(struct skcipher_request *req)
285 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
286 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
287 struct skcipher_walk walk;
291 err = skcipher_walk_virt(&walk, req, false);
293 while ((nbytes = walk.nbytes) > 0) {
294 const u8 *src = walk.src.virt.addr;
295 u8 *dst = walk.dst.virt.addr;
300 nblks = BYTES2BLKS(nbytes);
302 sm4_ce_cfb_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
303 dst += nblks * SM4_BLOCK_SIZE;
304 src += nblks * SM4_BLOCK_SIZE;
305 nbytes -= nblks * SM4_BLOCK_SIZE;
/* tail: generate one keystream block from the IV and XOR the leftovers */
309 if (walk.nbytes == walk.total && nbytes > 0) {
310 u8 keystream[SM4_BLOCK_SIZE];
312 sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
313 crypto_xor_cpy(dst, src, keystream, nbytes);
319 err = skcipher_walk_done(&walk, nbytes);
/*
 * CFB decryption.  Mirrors sm4_cfb_encrypt(): note CFB decryption also
 * uses the *encryption* round keys (rkey_enc), since CFB only ever runs
 * the block cipher forward to produce keystream.
 */
325 static int sm4_cfb_decrypt(struct skcipher_request *req)
327 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
328 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
329 struct skcipher_walk walk;
333 err = skcipher_walk_virt(&walk, req, false);
335 while ((nbytes = walk.nbytes) > 0) {
336 const u8 *src = walk.src.virt.addr;
337 u8 *dst = walk.dst.virt.addr;
342 nblks = BYTES2BLKS(nbytes);
344 sm4_ce_cfb_dec(ctx->rkey_enc, dst, src, walk.iv, nblks);
345 dst += nblks * SM4_BLOCK_SIZE;
346 src += nblks * SM4_BLOCK_SIZE;
347 nbytes -= nblks * SM4_BLOCK_SIZE;
/* tail: same keystream-XOR handling as the encrypt path */
351 if (walk.nbytes == walk.total && nbytes > 0) {
352 u8 keystream[SM4_BLOCK_SIZE];
354 sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
355 crypto_xor_cpy(dst, src, keystream, nbytes);
361 err = skcipher_walk_done(&walk, nbytes);
/*
 * CTR mode (encrypt == decrypt).  Full blocks are handled by the asm
 * routine, which is expected to advance the counter in walk.iv; for a
 * partial tail the counter block is encrypted here and crypto_inc()
 * bumps the big-endian counter before the final XOR.
 */
367 static int sm4_ctr_crypt(struct skcipher_request *req)
369 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
370 struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
371 struct skcipher_walk walk;
375 err = skcipher_walk_virt(&walk, req, false);
377 while ((nbytes = walk.nbytes) > 0) {
378 const u8 *src = walk.src.virt.addr;
379 u8 *dst = walk.dst.virt.addr;
384 nblks = BYTES2BLKS(nbytes);
386 sm4_ce_ctr_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
387 dst += nblks * SM4_BLOCK_SIZE;
388 src += nblks * SM4_BLOCK_SIZE;
389 nbytes -= nblks * SM4_BLOCK_SIZE;
/* tail: one more keystream block, then advance the counter */
393 if (walk.nbytes == walk.total && nbytes > 0) {
394 u8 keystream[SM4_BLOCK_SIZE];
396 sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
397 crypto_inc(walk.iv, SM4_BLOCK_SIZE);
398 crypto_xor_cpy(dst, src, keystream, nbytes);
404 err = skcipher_walk_done(&walk, nbytes);
/*
 * XTS worker.  Whole blocks are processed in bulk; when the request has
 * a partial tail (ciphertext stealing) that the walker cannot deliver
 * together with its preceding full block, the walk is restarted as a
 * subrequest covering all-but-the-last-two blocks, and the final
 * SM4_BLOCK_SIZE + tail bytes are processed in one CTS call.
 * rkey2_enc (key2) is passed to the asm so it can compute the tweak.
 */
410 static int sm4_xts_crypt(struct skcipher_request *req, bool encrypt)
412 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
413 struct sm4_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
414 int tail = req->cryptlen % SM4_BLOCK_SIZE;
415 const u32 *rkey2_enc = ctx->key2.rkey_enc;
416 struct scatterlist sg_src[2], sg_dst[2];
417 struct skcipher_request subreq;
418 struct scatterlist *src, *dst;
419 struct skcipher_walk walk;
/* XTS needs at least one full block. */
423 if (req->cryptlen < SM4_BLOCK_SIZE)
426 err = skcipher_walk_virt(&walk, req, false);
/* CTS tail split across walk steps: redo the walk without the last 2 blocks. */
430 if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
431 int nblocks = DIV_ROUND_UP(req->cryptlen, SM4_BLOCK_SIZE) - 2;
433 skcipher_walk_abort(&walk);
435 skcipher_request_set_tfm(&subreq, tfm);
436 skcipher_request_set_callback(&subreq,
437 skcipher_request_flags(req),
439 skcipher_request_set_crypt(&subreq, req->src, req->dst,
440 nblocks * SM4_BLOCK_SIZE, req->iv);
442 err = skcipher_walk_virt(&walk, &subreq, false);
/* bulk pass over full blocks; round down except on the last step */
449 while ((nbytes = walk.nbytes) >= SM4_BLOCK_SIZE) {
450 if (nbytes < walk.total)
451 nbytes &= ~(SM4_BLOCK_SIZE - 1);
456 sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
457 walk.src.virt.addr, walk.iv, nbytes,
460 sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
461 walk.src.virt.addr, walk.iv, nbytes,
468 err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
473 if (likely(tail == 0))
476 /* handle ciphertext stealing */
478 dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
479 if (req->dst != req->src)
480 dst = scatterwalk_ffwd(sg_dst, req->dst, subreq.cryptlen);
/* final full block + partial tail in a single CTS-capable asm call */
482 skcipher_request_set_crypt(&subreq, src, dst, SM4_BLOCK_SIZE + tail,
485 err = skcipher_walk_virt(&walk, &subreq, false);
492 sm4_ce_xts_enc(ctx->key1.rkey_enc, walk.dst.virt.addr,
493 walk.src.virt.addr, walk.iv, walk.nbytes,
496 sm4_ce_xts_dec(ctx->key1.rkey_dec, walk.dst.virt.addr,
497 walk.src.virt.addr, walk.iv, walk.nbytes,
502 return skcipher_walk_done(&walk, 0);
/* XTS encryption entry point. */
505 static int sm4_xts_encrypt(struct skcipher_request *req)
507 return sm4_xts_crypt(req, true);
/* XTS decryption entry point. */
510 static int sm4_xts_decrypt(struct skcipher_request *req)
512 return sm4_xts_crypt(req, false);
/*
 * Registered skcipher algorithms.  Each entry pairs a generic crypto-API
 * name ("mode(sm4)") with this driver's CE-accelerated implementation;
 * some cra_* fields (priority, flags) are elided in this excerpt.
 */
515 static struct skcipher_alg sm4_algs[] = {
/* ECB: no IV. */
518 .cra_name = "ecb(sm4)",
519 .cra_driver_name = "ecb-sm4-ce",
521 .cra_blocksize = SM4_BLOCK_SIZE,
522 .cra_ctxsize = sizeof(struct sm4_ctx),
523 .cra_module = THIS_MODULE,
525 .min_keysize = SM4_KEY_SIZE,
526 .max_keysize = SM4_KEY_SIZE,
527 .setkey = sm4_setkey,
528 .encrypt = sm4_ecb_encrypt,
529 .decrypt = sm4_ecb_decrypt,
/* CBC: one-block IV. */
532 .cra_name = "cbc(sm4)",
533 .cra_driver_name = "cbc-sm4-ce",
535 .cra_blocksize = SM4_BLOCK_SIZE,
536 .cra_ctxsize = sizeof(struct sm4_ctx),
537 .cra_module = THIS_MODULE,
539 .min_keysize = SM4_KEY_SIZE,
540 .max_keysize = SM4_KEY_SIZE,
541 .ivsize = SM4_BLOCK_SIZE,
542 .setkey = sm4_setkey,
543 .encrypt = sm4_cbc_encrypt,
544 .decrypt = sm4_cbc_decrypt,
/* CFB: stream-like, so chunksize advertises the keystream granularity. */
547 .cra_name = "cfb(sm4)",
548 .cra_driver_name = "cfb-sm4-ce",
551 .cra_ctxsize = sizeof(struct sm4_ctx),
552 .cra_module = THIS_MODULE,
554 .min_keysize = SM4_KEY_SIZE,
555 .max_keysize = SM4_KEY_SIZE,
556 .ivsize = SM4_BLOCK_SIZE,
557 .chunksize = SM4_BLOCK_SIZE,
558 .setkey = sm4_setkey,
559 .encrypt = sm4_cfb_encrypt,
560 .decrypt = sm4_cfb_decrypt,
/* CTR: encrypt and decrypt are the same operation. */
563 .cra_name = "ctr(sm4)",
564 .cra_driver_name = "ctr-sm4-ce",
567 .cra_ctxsize = sizeof(struct sm4_ctx),
568 .cra_module = THIS_MODULE,
570 .min_keysize = SM4_KEY_SIZE,
571 .max_keysize = SM4_KEY_SIZE,
572 .ivsize = SM4_BLOCK_SIZE,
573 .chunksize = SM4_BLOCK_SIZE,
574 .setkey = sm4_setkey,
575 .encrypt = sm4_ctr_crypt,
576 .decrypt = sm4_ctr_crypt,
/* CTS-CBC: walksize of 2 blocks so the final two blocks arrive together. */
579 .cra_name = "cts(cbc(sm4))",
580 .cra_driver_name = "cts-cbc-sm4-ce",
582 .cra_blocksize = SM4_BLOCK_SIZE,
583 .cra_ctxsize = sizeof(struct sm4_ctx),
584 .cra_module = THIS_MODULE,
586 .min_keysize = SM4_KEY_SIZE,
587 .max_keysize = SM4_KEY_SIZE,
588 .ivsize = SM4_BLOCK_SIZE,
589 .walksize = SM4_BLOCK_SIZE * 2,
590 .setkey = sm4_setkey,
591 .encrypt = sm4_cbc_cts_encrypt,
592 .decrypt = sm4_cbc_cts_decrypt,
/* XTS: double-length key, two-block walksize for ciphertext stealing. */
595 .cra_name = "xts(sm4)",
596 .cra_driver_name = "xts-sm4-ce",
598 .cra_blocksize = SM4_BLOCK_SIZE,
599 .cra_ctxsize = sizeof(struct sm4_xts_ctx),
600 .cra_module = THIS_MODULE,
602 .min_keysize = SM4_KEY_SIZE * 2,
603 .max_keysize = SM4_KEY_SIZE * 2,
604 .ivsize = SM4_BLOCK_SIZE,
605 .walksize = SM4_BLOCK_SIZE * 2,
606 .setkey = sm4_xts_setkey,
607 .encrypt = sm4_xts_encrypt,
608 .decrypt = sm4_xts_decrypt,
/*
 * cbcmac(sm4) setkey: plain key expansion, no derived constants needed
 * (cbcmac has no final-block tweak keys).
 */
612 static int sm4_cbcmac_setkey(struct crypto_shash *tfm, const u8 *key,
613 unsigned int key_len)
615 struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
617 if (key_len != SM4_KEY_SIZE)
621 sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
622 crypto_sm4_fk, crypto_sm4_ck);
/*
 * cmac(sm4) setkey: expand the key, encrypt the all-zero block, then
 * derive the CMAC subkeys k1 (consts[0]) and k2 (consts[1]) by doubling
 * in GF(2^128) — shift left one bit, conditionally XOR the reduction
 * polynomial constant 0x87 into the low byte.
 */
628 static int sm4_cmac_setkey(struct crypto_shash *tfm, const u8 *key,
629 unsigned int key_len)
631 struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
632 be128 *consts = (be128 *)ctx->consts;
635 if (key_len != SM4_KEY_SIZE)
638 memset(consts, 0, SM4_BLOCK_SIZE)
642 sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
643 crypto_sm4_fk, crypto_sm4_ck);
645 /* encrypt the zero block */
646 sm4_ce_crypt_block(ctx->key.rkey_enc, (u8 *)consts, (const u8 *)consts);
650 /* gf(2^128) multiply zero-ciphertext with u and u^2 */
651 a = be64_to_cpu(consts[0].a);
652 b = be64_to_cpu(consts[0].b);
653 consts[0].a = cpu_to_be64((a << 1) | (b >> 63));
654 consts[0].b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
/* k2 = k1 doubled once more */
656 a = be64_to_cpu(consts[0].a);
657 b = be64_to_cpu(consts[0].b);
658 consts[1].a = cpu_to_be64((a << 1) | (b >> 63));
659 consts[1].b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
/*
 * xcbc(sm4) setkey (RFC 3566 style): derive three subkeys by encrypting
 * the constant blocks 0x01.., 0x02.., 0x03.. under the user key.  k1
 * replaces the data-processing key (re-expanded at the end); k2/k3 are
 * stored in ctx->consts for the final-block XOR.
 */
664 static int sm4_xcbc_setkey(struct crypto_shash *tfm, const u8 *key,
665 unsigned int key_len)
667 struct sm4_mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
668 u8 __aligned(8) key2[SM4_BLOCK_SIZE];
669 static u8 const ks[3][SM4_BLOCK_SIZE] = {
670 { [0 ... SM4_BLOCK_SIZE - 1] = 0x1},
671 { [0 ... SM4_BLOCK_SIZE - 1] = 0x2},
672 { [0 ... SM4_BLOCK_SIZE - 1] = 0x3},
675 if (key_len != SM4_KEY_SIZE)
680 sm4_ce_expand_key(key, ctx->key.rkey_enc, ctx->key.rkey_dec,
681 crypto_sm4_fk, crypto_sm4_ck);
/* k1 = E(key, 0x01..); k2/k3 = E(key, 0x02..), E(key, 0x03..) */
683 sm4_ce_crypt_block(ctx->key.rkey_enc, key2, ks[0]);
684 sm4_ce_crypt(ctx->key.rkey_enc, ctx->consts, ks[1], 2);
/* from here on, MAC computation uses the derived key k1 */
686 sm4_ce_expand_key(key2, ctx->key.rkey_enc, ctx->key.rkey_dec,
687 crypto_sm4_fk, crypto_sm4_ck);
/* Reset the running MAC state: zero digest (length reset line elided). */
694 static int sm4_mac_init(struct shash_desc *desc)
696 struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
698 memset(ctx->digest, 0, SM4_BLOCK_SIZE);
/*
 * Shared update for cmac/xcbc/cbcmac.  Buffers up to one block in
 * ctx->digest by XOR (lazy-encrypt: a full buffered block is only
 * encrypted once more data arrives, so the final block can receive the
 * mode-specific tweak in the *_final routines).
 * NOTE(review): several flow-control lines are elided in this excerpt.
 */
704 static int sm4_mac_update(struct shash_desc *desc, const u8 *p,
707 struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
708 struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
709 unsigned int l, nblocks;
/* top up the partially-filled buffer first */
714 if (ctx->len || ctx->len + len < SM4_BLOCK_SIZE) {
715 l = min(len, SM4_BLOCK_SIZE - ctx->len);
717 crypto_xor(ctx->digest + ctx->len, p, l);
723 if (len && (ctx->len % SM4_BLOCK_SIZE) == 0) {
/* flush a full buffered block before absorbing a sub-block remainder */
726 if (len < SM4_BLOCK_SIZE && ctx->len == SM4_BLOCK_SIZE) {
727 sm4_ce_crypt_block(tctx->key.rkey_enc,
728 ctx->digest, ctx->digest);
/* bulk-process whole blocks via the asm MAC helper */
731 nblocks = len / SM4_BLOCK_SIZE;
732 len %= SM4_BLOCK_SIZE;
734 sm4_ce_mac_update(tctx->key.rkey_enc, ctx->digest, p,
735 nblocks, (ctx->len == SM4_BLOCK_SIZE),
738 p += nblocks * SM4_BLOCK_SIZE;
741 ctx->len = SM4_BLOCK_SIZE;
/* stash the remaining partial block by XOR into the digest */
747 crypto_xor(ctx->digest, p, len);
/*
 * cmac/xcbc finalization: pick subkey k1 for a full final block or
 * (after 10* padding via the 0x80 byte) k2 for a partial one, XOR it in
 * through one last sm4_ce_mac_update() call, and emit the digest.
 */
755 static int sm4_cmac_final(struct shash_desc *desc, u8 *out)
757 struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
758 struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
759 const u8 *consts = tctx->consts;
761 if (ctx->len != SM4_BLOCK_SIZE) {
762 ctx->digest[ctx->len] ^= 0x80;
763 consts += SM4_BLOCK_SIZE;
767 sm4_ce_mac_update(tctx->key.rkey_enc, ctx->digest, consts, 1,
771 memcpy(out, ctx->digest, SM4_BLOCK_SIZE);
/*
 * cbcmac finalization: just encrypt whatever is buffered (no padding
 * tweak) and emit the digest.
 */
776 static int sm4_cbcmac_final(struct shash_desc *desc, u8 *out)
778 struct sm4_mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
779 struct sm4_mac_desc_ctx *ctx = shash_desc_ctx(desc);
783 sm4_ce_crypt_block(tctx->key.rkey_enc, ctx->digest,
788 memcpy(out, ctx->digest, SM4_BLOCK_SIZE);
/*
 * Registered MAC (shash) algorithms.  cmac and xcbc carry two extra
 * blocks of derived constants after the tfm context; cbcmac needs none.
 */
793 static struct shash_alg sm4_mac_algs[] = {
/* CMAC per NIST SP 800-38B, over SM4. */
796 .cra_name = "cmac(sm4)",
797 .cra_driver_name = "cmac-sm4-ce",
799 .cra_blocksize = SM4_BLOCK_SIZE,
800 .cra_ctxsize = sizeof(struct sm4_mac_tfm_ctx)
801 + SM4_BLOCK_SIZE * 2,
802 .cra_module = THIS_MODULE,
804 .digestsize = SM4_BLOCK_SIZE,
805 .init = sm4_mac_init,
806 .update = sm4_mac_update,
807 .final = sm4_cmac_final,
808 .setkey = sm4_cmac_setkey,
809 .descsize = sizeof(struct sm4_mac_desc_ctx),
/* XCBC-MAC; shares update/final with cmac, differs only in setkey. */
812 .cra_name = "xcbc(sm4)",
813 .cra_driver_name = "xcbc-sm4-ce",
815 .cra_blocksize = SM4_BLOCK_SIZE,
816 .cra_ctxsize = sizeof(struct sm4_mac_tfm_ctx)
817 + SM4_BLOCK_SIZE * 2,
818 .cra_module = THIS_MODULE,
820 .digestsize = SM4_BLOCK_SIZE,
821 .init = sm4_mac_init,
822 .update = sm4_mac_update,
823 .final = sm4_cmac_final,
824 .setkey = sm4_xcbc_setkey,
825 .descsize = sizeof(struct sm4_mac_desc_ctx),
/* Raw CBC-MAC (used as a building block, e.g. by CCM). */
828 .cra_name = "cbcmac(sm4)",
829 .cra_driver_name = "cbcmac-sm4-ce",
832 .cra_ctxsize = sizeof(struct sm4_mac_tfm_ctx),
833 .cra_module = THIS_MODULE,
835 .digestsize = SM4_BLOCK_SIZE,
836 .init = sm4_mac_init,
837 .update = sm4_mac_update,
838 .final = sm4_cbcmac_final,
839 .setkey = sm4_cbcmac_setkey,
840 .descsize = sizeof(struct sm4_mac_desc_ctx),
/*
 * Module init: register the skciphers, then the shash MACs; on shash
 * failure unregister the skciphers so registration is all-or-nothing.
 */
844 static int __init sm4_init(void)
848 err = crypto_register_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
852 err = crypto_register_shashes(sm4_mac_algs, ARRAY_SIZE(sm4_mac_algs));
/* error path: roll back the skcipher registration */
859 crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
/* Module exit: unregister in reverse order of registration. */
863 static void __exit sm4_exit(void)
865 crypto_unregister_shashes(sm4_mac_algs, ARRAY_SIZE(sm4_mac_algs));
866 crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
/*
 * Load only on CPUs advertising the SM4 Crypto Extension; the
 * MODULE_ALIAS_CRYPTO entries let the crypto API auto-load this driver
 * when any of these algorithm names is requested.
 */
869 module_cpu_feature_match(SM4, sm4_init);
870 module_exit(sm4_exit);
872 MODULE_DESCRIPTION("SM4 ECB/CBC/CFB/CTR/XTS using ARMv8 Crypto Extensions");
873 MODULE_ALIAS_CRYPTO("sm4-ce");
874 MODULE_ALIAS_CRYPTO("sm4");
875 MODULE_ALIAS_CRYPTO("ecb(sm4)");
876 MODULE_ALIAS_CRYPTO("cbc(sm4)");
877 MODULE_ALIAS_CRYPTO("cfb(sm4)");
878 MODULE_ALIAS_CRYPTO("ctr(sm4)");
879 MODULE_ALIAS_CRYPTO("cts(cbc(sm4))");
880 MODULE_ALIAS_CRYPTO("xts(sm4)");
881 MODULE_ALIAS_CRYPTO("cmac(sm4)");
882 MODULE_ALIAS_CRYPTO("xcbc(sm4)");
883 MODULE_ALIAS_CRYPTO("cbcmac(sm4)");
884 MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
885 MODULE_LICENSE("GPL v2");