2 * caam - Freescale FSL CAAM support for crypto API
4 * Copyright 2008-2011 Freescale Semiconductor, Inc.
7 * Based on talitos crypto API driver.
9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11 * --------------- ---------------
12 * | JobDesc #1 |-------------------->| ShareDesc |
13 * | *(packet 1) | | (PDB) |
14 * --------------- |------------->| (hashKey) |
16 * . | |-------->| (operation) |
17 * --------------- | | ---------------
18 * | JobDesc #2 |------| |
24 * | JobDesc #3 |------------
28 * The SharedDesc never changes for a connection unless rekeyed, but
29 * each packet will likely be in a different place. So all we need
30 * to know to process the packet is where the input is, where the
31 * output goes, and what context we want to process with. Context is
32 * in the SharedDesc, packet references in the JobDesc.
34 * So, a job desc looks like:
36 * ---------------------
38 * | ShareDesc Pointer |
45 * ---------------------
52 #include "desc_constr.h"
55 #include "sg_sw_sec4.h"
57 #include "caamalg_desc.h"
62 #define CAAM_CRA_PRIORITY 3000
63 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
64 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
65 CTR_RFC3686_NONCE_SIZE + \
66 SHA512_DIGEST_SIZE * 2)
68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
74 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
75 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
78 /* for print_hex_dumps with line references */
79 #define debug(format, arg...) printk(format, arg)
81 #define debug(format, arg...)
/* NOTE(review): fragments of the driver's context structures; the struct
 * keywords/braces for caam_ctx appear elided from this extract — confirm
 * against the full source before relying on field boundaries.
 */
84 static struct list_head alg_list;
86 struct caam_alg_entry {
93 struct caam_aead_alg {
95 struct caam_alg_entry caam;
100 * per-session context
/* Shared descriptors (encrypt/decrypt/givencrypt), the raw key buffer,
 * and the DMA addresses under which each is mapped for the CAAM engine.
 */
103 u32 sh_desc_enc[DESC_MAX_USED_LEN];
104 u32 sh_desc_dec[DESC_MAX_USED_LEN];
105 u32 sh_desc_givenc[DESC_MAX_USED_LEN];
106 u8 key[CAAM_MAX_KEY_SIZE];
107 dma_addr_t sh_desc_enc_dma;
108 dma_addr_t sh_desc_dec_dma;
109 dma_addr_t sh_desc_givenc_dma;
/* Job-ring device plus per-session auth (adata) / cipher (cdata) info. */
111 struct device *jrdev;
112 struct alginfo adata;
113 struct alginfo cdata;
114 unsigned int authsize;
/*
 * aead_null_set_sh_desc() - build the encrypt/decrypt shared descriptors
 * for authentication-only ("null encryption") AEAD and sync them to the
 * device. NOTE(review): local declarations (e.g. u32 *desc), braces and
 * the return statement appear elided from this extract.
 */
117 static int aead_null_set_sh_desc(struct crypto_aead *aead)
119 struct caam_ctx *ctx = crypto_aead_ctx(aead);
120 struct device *jrdev = ctx->jrdev;
/* Bytes left in the 64-word descriptor buffer after job-descriptor I/O
 * overhead and the padded split auth key.
 */
122 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
123 ctx->adata.keylen_pad;
126 * Job Descriptor and Shared Descriptors
127 * must all fit into the 64-word Descriptor h/w Buffer
/* Inline the auth key into the descriptor only when it still fits;
 * otherwise reference it by DMA address.
 */
129 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
130 ctx->adata.key_inline = true;
131 ctx->adata.key_virt = ctx->key;
133 ctx->adata.key_inline = false;
134 ctx->adata.key_dma = ctx->key_dma;
137 /* aead_encrypt shared descriptor */
138 desc = ctx->sh_desc_enc;
139 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
/* Push the CPU-written descriptor out to the device. */
140 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
141 desc_bytes(desc), DMA_TO_DEVICE);
144 * Job Descriptor and Shared Descriptors
145 * must all fit into the 64-word Descriptor h/w Buffer
147 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
148 ctx->adata.key_inline = true;
149 ctx->adata.key_virt = ctx->key;
151 ctx->adata.key_inline = false;
152 ctx->adata.key_dma = ctx->key_dma;
155 /* aead_decrypt shared descriptor */
156 desc = ctx->sh_desc_dec;
157 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
158 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
159 desc_bytes(desc), DMA_TO_DEVICE);
/*
 * aead_set_sh_desc() - (re)build the encrypt, decrypt and — for geniv
 * algorithms — givencrypt shared descriptors for an authenc-style AEAD
 * session, deciding per descriptor whether the auth/cipher keys can be
 * inlined. NOTE(review): several interior lines (declarations such as
 * ctx1_iv_off/inl_mask, braces, early returns) appear elided here.
 */
164 static int aead_set_sh_desc(struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
167 struct caam_aead_alg, aead);
168 unsigned int ivsize = crypto_aead_ivsize(aead);
169 struct caam_ctx *ctx = crypto_aead_ctx(aead);
170 struct device *jrdev = ctx->jrdev;
172 u32 *desc, *nonce = NULL;
174 unsigned int data_len[2];
175 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
176 OP_ALG_AAI_CTR_MOD128);
177 const bool is_rfc3686 = alg->caam.rfc3686;
182 /* NULL encryption / decryption */
183 if (!ctx->cdata.keylen)
184 return aead_null_set_sh_desc(aead);
187 * AES-CTR needs to load IV in CONTEXT1 reg
188 * at an offset of 128bits (16bytes)
189 * CONTEXT1[255:128] = IV
196 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
/* RFC3686 keeps the nonce appended after the split auth key + enc key. */
199 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
200 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
201 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
/* data_len[0] = auth key (padded), data_len[1] = cipher key. */
204 data_len[0] = ctx->adata.keylen_pad;
205 data_len[1] = ctx->cdata.keylen;
211 * Job Descriptor and Shared Descriptors
212 * must all fit into the 64-word Descriptor h/w Buffer
/* Ask the query helper which of the two keys may be inlined (bit 0 =
 * auth key, bit 1 = cipher key in inl_mask).
 */
214 if (desc_inline_query(DESC_AEAD_ENC_LEN +
215 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
216 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
217 ARRAY_SIZE(data_len)) < 0)
221 ctx->adata.key_virt = ctx->key;
223 ctx->adata.key_dma = ctx->key_dma;
226 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
228 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
230 ctx->adata.key_inline = !!(inl_mask & 1);
231 ctx->cdata.key_inline = !!(inl_mask & 2);
233 /* aead_encrypt shared descriptor */
234 desc = ctx->sh_desc_enc;
235 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
236 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
238 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
239 desc_bytes(desc), DMA_TO_DEVICE);
243 * Job Descriptor and Shared Descriptors
244 * must all fit into the 64-word Descriptor h/w Buffer
246 if (desc_inline_query(DESC_AEAD_DEC_LEN +
247 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
248 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
249 ARRAY_SIZE(data_len)) < 0)
253 ctx->adata.key_virt = ctx->key;
255 ctx->adata.key_dma = ctx->key_dma;
258 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
260 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
262 ctx->adata.key_inline = !!(inl_mask & 1);
263 ctx->cdata.key_inline = !!(inl_mask & 2);
265 /* aead_decrypt shared descriptor */
266 desc = ctx->sh_desc_dec;
267 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
268 ctx->authsize, alg->caam.geniv, is_rfc3686,
269 nonce, ctx1_iv_off, false);
270 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
271 desc_bytes(desc), DMA_TO_DEVICE);
/* Only geniv algorithms need the givencrypt descriptor below. */
273 if (!alg->caam.geniv)
277 * Job Descriptor and Shared Descriptors
278 * must all fit into the 64-word Descriptor h/w Buffer
280 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
281 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
282 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
283 ARRAY_SIZE(data_len)) < 0)
287 ctx->adata.key_virt = ctx->key;
289 ctx->adata.key_dma = ctx->key_dma;
292 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
294 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
296 ctx->adata.key_inline = !!(inl_mask & 1);
297 ctx->cdata.key_inline = !!(inl_mask & 2);
299 /* aead_givencrypt shared descriptor */
/* NOTE(review): the givencrypt descriptor is written into sh_desc_enc,
 * replacing the plain encrypt descriptor built above.
 */
300 desc = ctx->sh_desc_enc;
301 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
302 ctx->authsize, is_rfc3686, nonce,
304 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
305 desc_bytes(desc), DMA_TO_DEVICE);
/*
 * aead_setauthsize() - record the requested ICV/tag length and rebuild the
 * shared descriptors, which embed the authsize.
 */
311 static int aead_setauthsize(struct crypto_aead *authenc,
312 unsigned int authsize)
314 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
316 ctx->authsize = authsize;
317 aead_set_sh_desc(authenc);
/*
 * gcm_set_sh_desc() - build the AES-GCM encrypt/decrypt shared descriptors
 * and sync them to the device. No-op until both key and authsize are set.
 * NOTE(review): local declarations (u32 *desc) and braces appear elided.
 */
322 static int gcm_set_sh_desc(struct crypto_aead *aead)
324 struct caam_ctx *ctx = crypto_aead_ctx(aead);
325 struct device *jrdev = ctx->jrdev;
327 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
/* Both the cipher key and the tag size must be known before the
 * descriptors can be constructed.
 */
330 if (!ctx->cdata.keylen || !ctx->authsize)
334 * AES GCM encrypt shared descriptor
335 * Job Descriptor and Shared Descriptor
336 * must fit into the 64-word Descriptor h/w Buffer
338 if (rem_bytes >= DESC_GCM_ENC_LEN) {
339 ctx->cdata.key_inline = true;
340 ctx->cdata.key_virt = ctx->key;
342 ctx->cdata.key_inline = false;
343 ctx->cdata.key_dma = ctx->key_dma;
346 desc = ctx->sh_desc_enc;
347 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
348 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
349 desc_bytes(desc), DMA_TO_DEVICE);
352 * Job Descriptor and Shared Descriptors
353 * must all fit into the 64-word Descriptor h/w Buffer
355 if (rem_bytes >= DESC_GCM_DEC_LEN) {
356 ctx->cdata.key_inline = true;
357 ctx->cdata.key_virt = ctx->key;
359 ctx->cdata.key_inline = false;
360 ctx->cdata.key_dma = ctx->key_dma;
363 desc = ctx->sh_desc_dec;
364 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
365 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
366 desc_bytes(desc), DMA_TO_DEVICE);
/* gcm_setauthsize() - store the GCM tag length and rebuild descriptors. */
371 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
373 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
375 ctx->authsize = authsize;
376 gcm_set_sh_desc(authenc);
/*
 * rfc4106_set_sh_desc() - build the RFC4106 (GCM for IPsec ESP)
 * encrypt/decrypt shared descriptors and sync them to the device.
 * NOTE(review): local declarations and braces appear elided here.
 */
381 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
383 struct caam_ctx *ctx = crypto_aead_ctx(aead);
384 struct device *jrdev = ctx->jrdev;
386 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
/* Need both key and tag length before descriptors can be built. */
389 if (!ctx->cdata.keylen || !ctx->authsize)
393 * RFC4106 encrypt shared descriptor
394 * Job Descriptor and Shared Descriptor
395 * must fit into the 64-word Descriptor h/w Buffer
397 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
398 ctx->cdata.key_inline = true;
399 ctx->cdata.key_virt = ctx->key;
401 ctx->cdata.key_inline = false;
402 ctx->cdata.key_dma = ctx->key_dma;
405 desc = ctx->sh_desc_enc;
406 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
407 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
408 desc_bytes(desc), DMA_TO_DEVICE);
411 * Job Descriptor and Shared Descriptors
412 * must all fit into the 64-word Descriptor h/w Buffer
414 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
415 ctx->cdata.key_inline = true;
416 ctx->cdata.key_virt = ctx->key;
418 ctx->cdata.key_inline = false;
419 ctx->cdata.key_dma = ctx->key_dma;
422 desc = ctx->sh_desc_dec;
423 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
424 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
425 desc_bytes(desc), DMA_TO_DEVICE);
/* rfc4106_setauthsize() - store tag length, rebuild RFC4106 descriptors. */
430 static int rfc4106_setauthsize(struct crypto_aead *authenc,
431 unsigned int authsize)
433 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
435 ctx->authsize = authsize;
436 rfc4106_set_sh_desc(authenc);
/*
 * rfc4543_set_sh_desc() - build the RFC4543 (GMAC) encrypt/decrypt shared
 * descriptors and sync them to the device.
 * NOTE(review): local declarations and braces appear elided here.
 */
441 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
443 struct caam_ctx *ctx = crypto_aead_ctx(aead);
444 struct device *jrdev = ctx->jrdev;
446 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
/* Need both key and tag length before descriptors can be built. */
449 if (!ctx->cdata.keylen || !ctx->authsize)
453 * RFC4543 encrypt shared descriptor
454 * Job Descriptor and Shared Descriptor
455 * must fit into the 64-word Descriptor h/w Buffer
457 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
458 ctx->cdata.key_inline = true;
459 ctx->cdata.key_virt = ctx->key;
461 ctx->cdata.key_inline = false;
462 ctx->cdata.key_dma = ctx->key_dma;
465 desc = ctx->sh_desc_enc;
466 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
467 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
468 desc_bytes(desc), DMA_TO_DEVICE);
471 * Job Descriptor and Shared Descriptors
472 * must all fit into the 64-word Descriptor h/w Buffer
474 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
475 ctx->cdata.key_inline = true;
476 ctx->cdata.key_virt = ctx->key;
478 ctx->cdata.key_inline = false;
479 ctx->cdata.key_dma = ctx->key_dma;
482 desc = ctx->sh_desc_dec;
483 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
484 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
485 desc_bytes(desc), DMA_TO_DEVICE);
/* rfc4543_setauthsize() - store tag length, rebuild RFC4543 descriptors. */
490 static int rfc4543_setauthsize(struct crypto_aead *authenc,
491 unsigned int authsize)
493 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
495 ctx->authsize = authsize;
496 rfc4543_set_sh_desc(authenc);
/*
 * aead_setkey() - split a combined authenc key blob into auth + enc parts,
 * derive the split (IPAD/OPAD) auth key via the job ring, append the raw
 * encryption key, DMA-sync the key material, then rebuild descriptors.
 * NOTE(review): error-handling lines (badkey path, ret checks) appear
 * partially elided from this extract.
 */
501 static int aead_setkey(struct crypto_aead *aead,
502 const u8 *key, unsigned int keylen)
504 struct caam_ctx *ctx = crypto_aead_ctx(aead);
505 struct device *jrdev = ctx->jrdev;
506 struct crypto_authenc_keys keys;
/* Parse the rtattr-encoded authenc key into authkey/enckey pieces. */
509 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
513 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
514 keys.authkeylen + keys.enckeylen, keys.enckeylen,
516 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
517 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
/* Derive the split HMAC key into ctx->key via the CAAM job ring. */
520 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
521 keys.authkeylen, CAAM_MAX_KEY_SIZE -
527 /* postpend encryption key to auth split key */
528 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
529 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
530 keys.enckeylen, DMA_TO_DEVICE);
532 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
533 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
534 ctx->adata.keylen_pad + keys.enckeylen, 1);
536 ctx->cdata.keylen = keys.enckeylen;
537 return aead_set_sh_desc(aead);
/* Bad-key error path: flag the tfm so the caller sees the reason. */
539 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
/*
 * gcm_setkey() - copy the raw AES key into the context, sync it to the
 * device, and rebuild the GCM shared descriptors.
 */
543 static int gcm_setkey(struct crypto_aead *aead,
544 const u8 *key, unsigned int keylen)
546 struct caam_ctx *ctx = crypto_aead_ctx(aead);
547 struct device *jrdev = ctx->jrdev;
550 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
551 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
554 memcpy(ctx->key, key, keylen);
555 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
556 ctx->cdata.keylen = keylen;
558 return gcm_set_sh_desc(aead);
/*
 * rfc4106_setkey() - copy key material (AES key + 4-byte salt) into the
 * context; only the AES-key portion counts toward cdata.keylen.
 */
561 static int rfc4106_setkey(struct crypto_aead *aead,
562 const u8 *key, unsigned int keylen)
564 struct caam_ctx *ctx = crypto_aead_ctx(aead);
565 struct device *jrdev = ctx->jrdev;
571 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
572 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
575 memcpy(ctx->key, key, keylen);
578 * The last four bytes of the key material are used as the salt value
579 * in the nonce. Update the AES key length.
581 ctx->cdata.keylen = keylen - 4;
/* NOTE(review): only cdata.keylen bytes are synced here even though
 * keylen bytes (key + salt) were copied — confirm against full source.
 */
582 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
584 return rfc4106_set_sh_desc(aead);
/*
 * rfc4543_setkey() - same layout as rfc4106_setkey(): key + 4-byte salt;
 * cdata.keylen tracks only the AES-key portion.
 */
587 static int rfc4543_setkey(struct crypto_aead *aead,
588 const u8 *key, unsigned int keylen)
590 struct caam_ctx *ctx = crypto_aead_ctx(aead);
591 struct device *jrdev = ctx->jrdev;
597 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
598 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
601 memcpy(ctx->key, key, keylen);
604 * The last four bytes of the key material are used as the salt value
605 * in the nonce. Update the AES key length.
607 ctx->cdata.keylen = keylen - 4;
608 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
610 return rfc4543_set_sh_desc(aead);
/*
 * ablkcipher_setkey() - install a symmetric-cipher key and build all three
 * (encrypt, decrypt, givencrypt) shared descriptors with the key inlined.
 * RFC3686 keys carry a trailing nonce which is excluded from keylen.
 * NOTE(review): local declarations (u32 *desc) and braces appear elided.
 */
613 static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
614 const u8 *key, unsigned int keylen)
616 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
617 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
618 const char *alg_name = crypto_tfm_alg_name(tfm);
619 struct device *jrdev = ctx->jrdev;
620 unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
623 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
624 OP_ALG_AAI_CTR_MOD128);
/* rfc3686 is detected from the algorithm name, not a dedicated flag. */
625 const bool is_rfc3686 = (ctr_mode &&
626 (strstr(alg_name, "rfc3686") != NULL));
628 memcpy(ctx->key, key, keylen);
630 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
631 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
634 * AES-CTR needs to load IV in CONTEXT1 reg
635 * at an offset of 128bits (16bytes)
636 * CONTEXT1[255:128] = IV
643 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
644 * | *key = {KEY, NONCE}
647 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
/* Exclude the trailing RFC3686 nonce from the cipher key length. */
648 keylen -= CTR_RFC3686_NONCE_SIZE;
651 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
652 ctx->cdata.keylen = keylen;
653 ctx->cdata.key_virt = ctx->key;
654 ctx->cdata.key_inline = true;
656 /* ablkcipher_encrypt shared descriptor */
657 desc = ctx->sh_desc_enc;
658 cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
660 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
661 desc_bytes(desc), DMA_TO_DEVICE);
663 /* ablkcipher_decrypt shared descriptor */
664 desc = ctx->sh_desc_dec;
665 cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
667 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
668 desc_bytes(desc), DMA_TO_DEVICE);
670 /* ablkcipher_givencrypt shared descriptor */
671 desc = ctx->sh_desc_givenc;
672 cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
674 dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
675 desc_bytes(desc), DMA_TO_DEVICE);
/*
 * xts_ablkcipher_setkey() - install an AES-XTS key (two AES keys, so the
 * total must be exactly 2*AES_MIN_KEY_SIZE or 2*AES_MAX_KEY_SIZE) and
 * build the XTS encrypt/decrypt shared descriptors.
 * NOTE(review): the error return after dev_err and u32 *desc declaration
 * appear elided from this extract.
 */
680 static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
681 const u8 *key, unsigned int keylen)
683 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
684 struct device *jrdev = ctx->jrdev;
687 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
688 crypto_ablkcipher_set_flags(ablkcipher,
689 CRYPTO_TFM_RES_BAD_KEY_LEN);
690 dev_err(jrdev, "key size mismatch\n");
694 memcpy(ctx->key, key, keylen);
695 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
696 ctx->cdata.keylen = keylen;
697 ctx->cdata.key_virt = ctx->key;
698 ctx->cdata.key_inline = true;
700 /* xts_ablkcipher_encrypt shared descriptor */
701 desc = ctx->sh_desc_enc;
702 cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
703 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
704 desc_bytes(desc), DMA_TO_DEVICE);
706 /* xts_ablkcipher_decrypt shared descriptor */
707 desc = ctx->sh_desc_dec;
708 cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
709 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
710 desc_bytes(desc), DMA_TO_DEVICE);
/* NOTE(review): kernel-doc headers plus fragments of the aead_edesc and
 * ablkcipher_edesc struct definitions; the struct keywords/braces and
 * several members (src_nents, dst_nents, hw_desc, iv_dma) named in the
 * kernel-doc are elided from this extract.
 */
716 * aead_edesc - s/w-extended aead descriptor
717 * @src_nents: number of segments in input s/w scatterlist
718 * @dst_nents: number of segments in output s/w scatterlist
719 * @sec4_sg_bytes: length of dma mapped sec4_sg space
720 * @sec4_sg_dma: bus physical mapped address of h/w link table
721 * @sec4_sg: pointer to h/w link table
722 * @hw_desc: the h/w job descriptor followed by any referenced link tables
728 dma_addr_t sec4_sg_dma;
729 struct sec4_sg_entry *sec4_sg;
734 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
735 * @src_nents: number of segments in input s/w scatterlist
736 * @dst_nents: number of segments in output s/w scatterlist
737 * @iv_dma: dma address of iv for checking continuity and link table
738 * @iv_dir: DMA mapping direction for IV
739 * @sec4_sg_bytes: length of dma mapped sec4_sg space
740 * @sec4_sg_dma: bus physical mapped address of h/w link table
741 * @sec4_sg: pointer to h/w link table
742 * @hw_desc: the h/w job descriptor followed by any referenced link tables
745 struct ablkcipher_edesc {
749 enum dma_data_direction iv_dir;
751 dma_addr_t sec4_sg_dma;
752 struct sec4_sg_entry *sec4_sg;
/*
 * caam_unmap() - common DMA-unmap helper: tears down the src/dst
 * scatterlist mappings (bidirectional when src == dst), the IV single
 * mapping, and the sec4 link-table mapping.
 * NOTE(review): the conditional guards around each unmap appear elided.
 */
756 static void caam_unmap(struct device *dev, struct scatterlist *src,
757 struct scatterlist *dst, int src_nents,
759 dma_addr_t iv_dma, int ivsize,
760 enum dma_data_direction iv_dir, dma_addr_t sec4_sg_dma,
765 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
766 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
768 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
772 dma_unmap_single(dev, iv_dma, ivsize, iv_dir);
774 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
/*
 * aead_unmap() - unmap an AEAD extended descriptor; AEAD requests carry no
 * separately-mapped IV, hence iv_dma=0 / DMA_NONE.
 */
778 static void aead_unmap(struct device *dev,
779 struct aead_edesc *edesc,
780 struct aead_request *req)
782 caam_unmap(dev, req->src, req->dst,
783 edesc->src_nents, edesc->dst_nents, 0, 0, DMA_NONE,
784 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/*
 * ablkcipher_unmap() - unmap an ablkcipher extended descriptor, including
 * its separately-mapped IV (direction recorded in edesc->iv_dir).
 */
787 static void ablkcipher_unmap(struct device *dev,
788 struct ablkcipher_edesc *edesc,
789 struct ablkcipher_request *req)
791 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
792 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
794 caam_unmap(dev, req->src, req->dst,
795 edesc->src_nents, edesc->dst_nents,
796 edesc->iv_dma, ivsize, edesc->iv_dir,
797 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/*
 * aead_encrypt_done() - job-ring completion callback for AEAD encryption:
 * translate hardware status, unmap DMA resources, complete the request.
 * NOTE(review): error-check guards and kfree(edesc) appear elided here.
 */
800 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
803 struct aead_request *req = context;
804 struct aead_edesc *edesc;
807 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
/* Recover the edesc from the embedded hw_desc the hardware handed back. */
810 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
813 caam_jr_strstatus(jrdev, err);
815 aead_unmap(jrdev, edesc, req);
819 aead_request_complete(req, err);
/*
 * aead_decrypt_done() - job-ring completion callback for AEAD decryption;
 * additionally maps a hardware ICV-check failure to -EBADMSG.
 * NOTE(review): error-check guards and kfree(edesc) appear elided here.
 */
822 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
825 struct aead_request *req = context;
826 struct aead_edesc *edesc;
829 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
832 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
835 caam_jr_strstatus(jrdev, err);
837 aead_unmap(jrdev, edesc, req);
840 * verify hw auth check passed else return -EBADMSG
842 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
847 aead_request_complete(req, err);
/*
 * ablkcipher_encrypt_done() - completion callback for ablkcipher
 * encryption: unmap DMA state, update req->info for CBC chaining, copy a
 * generated IV back into the givcrypt request when one was produced.
 * NOTE(review): guards, kfree(edesc) and some declarations are elided.
 */
850 static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
853 struct ablkcipher_request *req = context;
854 struct ablkcipher_edesc *edesc;
855 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
856 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
857 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
860 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
863 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
866 caam_jr_strstatus(jrdev, err);
869 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
870 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
871 edesc->src_nents > 1 ? 100 : ivsize, 1);
873 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
874 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
875 edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
877 ablkcipher_unmap(jrdev, edesc, req);
880 * The crypto API expects us to set the IV (req->info) to the last
881 * ciphertext block when running in CBC mode.
883 if ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == OP_ALG_AAI_CBC)
884 scatterwalk_map_and_copy(req->info, req->dst, req->nbytes -
887 /* In case initial IV was generated, copy it in GIVCIPHER request */
/* iv_dir == DMA_FROM_DEVICE marks a givencrypt request: the generated IV
 * sits after the hw descriptor and link table in the edesc allocation.
 */
888 if (edesc->iv_dir == DMA_FROM_DEVICE) {
890 struct skcipher_givcrypt_request *greq;
892 greq = container_of(req, struct skcipher_givcrypt_request,
894 iv = (u8 *)edesc->hw_desc + desc_bytes(edesc->hw_desc) +
895 edesc->sec4_sg_bytes;
896 memcpy(greq->giv, iv, ivsize);
901 ablkcipher_request_complete(req, err);
/*
 * ablkcipher_decrypt_done() - completion callback for ablkcipher
 * decryption: dump debug state, unmap DMA resources, complete the request.
 * NOTE(review): guards and kfree(edesc) appear elided from this extract.
 */
904 static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
907 struct ablkcipher_request *req = context;
908 struct ablkcipher_edesc *edesc;
910 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
911 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
913 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
916 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
918 caam_jr_strstatus(jrdev, err);
921 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
922 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
925 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
926 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
927 edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
929 ablkcipher_unmap(jrdev, edesc, req);
932 ablkcipher_request_complete(req, err);
936 * Fill in aead job descriptor
/*
 * init_aead_job() - build the per-request job descriptor: point it at the
 * session's shared descriptor, then append SEQ IN/OUT pointers covering
 * assoclen + cryptlen (plus/minus the ICV depending on direction).
 * NOTE(review): several lines (sh_desc/ptr declarations, dst_dma setup
 * when src == dst, braces) appear elided from this extract.
 */
938 static void init_aead_job(struct aead_request *req,
939 struct aead_edesc *edesc,
940 bool all_contig, bool encrypt)
942 struct crypto_aead *aead = crypto_aead_reqtfm(req);
943 struct caam_ctx *ctx = crypto_aead_ctx(aead);
944 int authsize = ctx->authsize;
945 u32 *desc = edesc->hw_desc;
946 u32 out_options, in_options;
947 dma_addr_t dst_dma, src_dma;
948 int len, sec4_sg_index = 0;
/* Select the direction-appropriate shared descriptor and its DMA addr. */
952 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
953 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
955 len = desc_len(sh_desc);
956 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
959 src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
/* Multi-segment source: point at the sec4 link table instead. */
962 src_dma = edesc->sec4_sg_dma;
963 sec4_sg_index += edesc->src_nents;
964 in_options = LDST_SGF;
967 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
971 out_options = in_options;
973 if (unlikely(req->src != req->dst)) {
974 if (edesc->dst_nents == 1) {
975 dst_dma = sg_dma_address(req->dst);
977 dst_dma = edesc->sec4_sg_dma +
979 sizeof(struct sec4_sg_entry);
980 out_options = LDST_SGF;
/* Output length gains the ICV on encrypt, loses it on decrypt. */
985 append_seq_out_ptr(desc, dst_dma,
986 req->assoclen + req->cryptlen + authsize,
989 append_seq_out_ptr(desc, dst_dma,
990 req->assoclen + req->cryptlen - authsize,
993 /* REG3 = assoclen */
994 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
/*
 * init_gcm_job() - extend the common AEAD job descriptor with the GCM IV
 * FIFO-load; for generic GCM with zero-length input the IV is marked as
 * the last FIFO entry.
 * NOTE(review): the `last` declaration and some braces appear elided.
 */
997 static void init_gcm_job(struct aead_request *req,
998 struct aead_edesc *edesc,
999 bool all_contig, bool encrypt)
1001 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1002 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1003 unsigned int ivsize = crypto_aead_ivsize(aead);
1004 u32 *desc = edesc->hw_desc;
/* Generic GCM is distinguished by its 12-byte IV. */
1005 bool generic_gcm = (ivsize == 12);
1008 init_aead_job(req, edesc, all_contig, encrypt);
1010 /* BUG This should not be specific to generic GCM. */
1012 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1013 last = FIFOLD_TYPE_LAST1;
1016 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1017 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
/* Non-generic (rfc4106/4543) path: load the 4-byte salt stored after
 * the AES key, then the request IV.
 */
1020 append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1022 append_data(desc, req->iv, ivsize);
1023 /* End of blank commands */
/*
 * init_authenc_job() - extend the common AEAD job descriptor for authenc
 * algorithms: compute the CONTEXT1 IV offset (shifted for CTR/RFC3686)
 * and load the request IV as an immediate when required.
 * NOTE(review): the ivoffset declaration and some braces appear elided.
 */
1026 static void init_authenc_job(struct aead_request *req,
1027 struct aead_edesc *edesc,
1028 bool all_contig, bool encrypt)
1030 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1031 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1032 struct caam_aead_alg, aead);
1033 unsigned int ivsize = crypto_aead_ivsize(aead);
1034 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1035 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1036 OP_ALG_AAI_CTR_MOD128);
1037 const bool is_rfc3686 = alg->caam.rfc3686;
1038 u32 *desc = edesc->hw_desc;
1042 * AES-CTR needs to load IV in CONTEXT1 reg
1043 * at an offset of 128bits (16bytes)
1044 * CONTEXT1[255:128] = IV
1051 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1054 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1056 init_aead_job(req, edesc, all_contig, encrypt);
/* geniv decrypt carries its IV in the input stream, so skip the load. */
1058 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1059 append_load_as_imm(desc, req->iv, ivsize,
1061 LDST_SRCDST_BYTE_CONTEXT |
1062 (ivoffset << LDST_OFFSET_SHIFT));
1066 * Fill in ablkcipher job descriptor
/*
 * init_ablkcipher_job() - build the per-request job descriptor for a
 * (de/en)crypt: SEQ IN always goes through the sec4 link table (IV is
 * prepended, hence nbytes + ivsize); SEQ OUT depends on dst layout.
 * NOTE(review): dst_dma/len declarations and braces appear elided.
 */
1068 static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
1069 struct ablkcipher_edesc *edesc,
1070 struct ablkcipher_request *req)
1072 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1073 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1074 u32 *desc = edesc->hw_desc;
1075 u32 out_options = 0;
1080 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
1081 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1083 pr_err("asked=%d, nbytes%d\n",
1084 (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
1086 caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ",
1087 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1088 edesc->src_nents > 1 ? 100 : req->nbytes, 1);
1090 len = desc_len(sh_desc);
1091 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1093 append_seq_in_ptr(desc, edesc->sec4_sg_dma, req->nbytes + ivsize,
/* In-place: output starts one link-table entry past the IV entry. */
1096 if (likely(req->src == req->dst)) {
1097 dst_dma = edesc->sec4_sg_dma + sizeof(struct sec4_sg_entry);
1098 out_options = LDST_SGF;
1100 if (edesc->dst_nents == 1) {
1101 dst_dma = sg_dma_address(req->dst);
1104 dst_dma = edesc->sec4_sg_dma + (edesc->src_nents + 1) *
1105 sizeof(struct sec4_sg_entry);
1106 out_options = LDST_SGF;
1109 append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
1113 * Fill in ablkcipher givencrypt job descriptor
/*
 * init_ablkcipher_giv_job() - build the job descriptor for givencrypt:
 * plain input sequence, but SEQ OUT always goes through the link table
 * with length nbytes + ivsize so the generated IV is written out too.
 * NOTE(review): the in_options initialization and braces appear elided.
 */
1115 static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
1116 struct ablkcipher_edesc *edesc,
1117 struct ablkcipher_request *req)
1119 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1120 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1121 u32 *desc = edesc->hw_desc;
1123 dma_addr_t dst_dma, src_dma;
1124 int len, sec4_sg_index = 0;
1127 print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
1128 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1131 caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
1132 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1133 edesc->src_nents > 1 ? 100 : req->nbytes, 1);
1135 len = desc_len(sh_desc);
1136 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1138 if (edesc->src_nents == 1) {
1139 src_dma = sg_dma_address(req->src);
1142 src_dma = edesc->sec4_sg_dma;
1143 sec4_sg_index += edesc->src_nents;
1144 in_options = LDST_SGF;
1146 append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);
1148 dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
1149 sizeof(struct sec4_sg_entry);
1150 append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, LDST_SGF);
1154 * allocate and map the aead extended descriptor
/*
 * aead_edesc_alloc() - count and DMA-map the src/dst scatterlists, then
 * allocate one buffer holding the edesc, the h/w job descriptor and the
 * sec4 link table, build the link table, and map it for the device.
 * Returns the edesc or an ERR_PTR on failure.
 * NOTE(review): several lines (sec4_sg_index init, kzalloc flags arg,
 * braces, final return) appear elided from this extract.
 */
1156 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1157 int desc_bytes, bool *all_contig_ptr,
1160 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1161 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1162 struct device *jrdev = ctx->jrdev;
/* Only sleep-capable requests may use GFP_KERNEL. */
1163 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1164 GFP_KERNEL : GFP_ATOMIC;
1165 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1166 struct aead_edesc *edesc;
1167 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1168 unsigned int authsize = ctx->authsize;
/* Out-of-place: dst must additionally hold (encrypt) or drop (decrypt)
 * the authsize-byte ICV, so src and dst lengths differ.
 */
1170 if (unlikely(req->dst != req->src)) {
1171 src_nents = sg_nents_for_len(req->src, req->assoclen +
1173 if (unlikely(src_nents < 0)) {
1174 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1175 req->assoclen + req->cryptlen);
1176 return ERR_PTR(src_nents);
1179 dst_nents = sg_nents_for_len(req->dst, req->assoclen +
1181 (encrypt ? authsize :
1183 if (unlikely(dst_nents < 0)) {
1184 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1185 req->assoclen + req->cryptlen +
1186 (encrypt ? authsize : (-authsize)));
1187 return ERR_PTR(dst_nents);
1190 src_nents = sg_nents_for_len(req->src, req->assoclen +
1192 (encrypt ? authsize : 0));
1193 if (unlikely(src_nents < 0)) {
1194 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1195 req->assoclen + req->cryptlen +
1196 (encrypt ? authsize : 0));
1197 return ERR_PTR(src_nents);
/* In-place requests map src bidirectionally; out-of-place maps src
 * to-device and dst from-device.
 */
1201 if (likely(req->src == req->dst)) {
1202 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1204 if (unlikely(!mapped_src_nents)) {
1205 dev_err(jrdev, "unable to map source\n");
1206 return ERR_PTR(-ENOMEM);
1209 /* Cover also the case of null (zero length) input data */
1211 mapped_src_nents = dma_map_sg(jrdev, req->src,
1212 src_nents, DMA_TO_DEVICE);
1213 if (unlikely(!mapped_src_nents)) {
1214 dev_err(jrdev, "unable to map source\n");
1215 return ERR_PTR(-ENOMEM);
1218 mapped_src_nents = 0;
1221 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1223 if (unlikely(!mapped_dst_nents)) {
1224 dev_err(jrdev, "unable to map destination\n");
1225 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1226 return ERR_PTR(-ENOMEM);
/* Link-table entries are needed only for multi-segment lists. */
1230 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1231 sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1232 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1234 /* allocate space for base edesc and hw desc commands, link tables */
1235 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1238 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1240 return ERR_PTR(-ENOMEM);
1243 edesc->src_nents = src_nents;
1244 edesc->dst_nents = dst_nents;
/* Link table lives after the edesc struct + hw descriptor space. */
1245 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1247 *all_contig_ptr = !(mapped_src_nents > 1);
1250 if (mapped_src_nents > 1) {
1251 sg_to_sec4_sg_last(req->src, mapped_src_nents,
1252 edesc->sec4_sg + sec4_sg_index, 0);
1253 sec4_sg_index += mapped_src_nents;
1255 if (mapped_dst_nents > 1) {
1256 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1257 edesc->sec4_sg + sec4_sg_index, 0);
1263 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1264 sec4_sg_bytes, DMA_TO_DEVICE);
1265 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1266 dev_err(jrdev, "unable to map S/G table\n");
1267 aead_unmap(jrdev, edesc, req);
1269 return ERR_PTR(-ENOMEM);
1272 edesc->sec4_sg_bytes = sec4_sg_bytes;
1277 static int gcm_encrypt(struct aead_request *req)
1279 struct aead_edesc *edesc;
1280 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1281 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1282 struct device *jrdev = ctx->jrdev;
1287 /* allocate extended descriptor */
1288 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
1290 return PTR_ERR(edesc);
1292 /* Create and submit job descriptor */
1293 init_gcm_job(req, edesc, all_contig, true);
1295 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1296 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1297 desc_bytes(edesc->hw_desc), 1);
1300 desc = edesc->hw_desc;
1301 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1305 aead_unmap(jrdev, edesc, req);
1312 static int ipsec_gcm_encrypt(struct aead_request *req)
1314 if (req->assoclen < 8)
1317 return gcm_encrypt(req);
1320 static int aead_encrypt(struct aead_request *req)
1322 struct aead_edesc *edesc;
1323 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1324 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1325 struct device *jrdev = ctx->jrdev;
1330 /* allocate extended descriptor */
1331 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1334 return PTR_ERR(edesc);
1336 /* Create and submit job descriptor */
1337 init_authenc_job(req, edesc, all_contig, true);
1339 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1340 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1341 desc_bytes(edesc->hw_desc), 1);
1344 desc = edesc->hw_desc;
1345 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1349 aead_unmap(jrdev, edesc, req);
1356 static int gcm_decrypt(struct aead_request *req)
1358 struct aead_edesc *edesc;
1359 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1360 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1361 struct device *jrdev = ctx->jrdev;
1366 /* allocate extended descriptor */
1367 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1369 return PTR_ERR(edesc);
1371 /* Create and submit job descriptor*/
1372 init_gcm_job(req, edesc, all_contig, false);
1374 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1375 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1376 desc_bytes(edesc->hw_desc), 1);
1379 desc = edesc->hw_desc;
1380 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1384 aead_unmap(jrdev, edesc, req);
1391 static int ipsec_gcm_decrypt(struct aead_request *req)
1393 if (req->assoclen < 8)
1396 return gcm_decrypt(req);
1399 static int aead_decrypt(struct aead_request *req)
1401 struct aead_edesc *edesc;
1402 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1403 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1404 struct device *jrdev = ctx->jrdev;
1409 caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
1410 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1411 req->assoclen + req->cryptlen, 1);
1413 /* allocate extended descriptor */
1414 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1415 &all_contig, false);
1417 return PTR_ERR(edesc);
1419 /* Create and submit job descriptor*/
1420 init_authenc_job(req, edesc, all_contig, false);
1422 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1423 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1424 desc_bytes(edesc->hw_desc), 1);
1427 desc = edesc->hw_desc;
1428 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1432 aead_unmap(jrdev, edesc, req);
1440 * allocate and map the ablkcipher extended descriptor for ablkcipher
1442 static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
1443 *req, int desc_bytes)
1445 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1446 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1447 struct device *jrdev = ctx->jrdev;
1448 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1449 GFP_KERNEL : GFP_ATOMIC;
1450 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1451 struct ablkcipher_edesc *edesc;
1454 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1455 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1457 src_nents = sg_nents_for_len(req->src, req->nbytes);
1458 if (unlikely(src_nents < 0)) {
1459 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1461 return ERR_PTR(src_nents);
1464 if (req->dst != req->src) {
1465 dst_nents = sg_nents_for_len(req->dst, req->nbytes);
1466 if (unlikely(dst_nents < 0)) {
1467 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1469 return ERR_PTR(dst_nents);
1473 if (likely(req->src == req->dst)) {
1474 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1476 if (unlikely(!mapped_src_nents)) {
1477 dev_err(jrdev, "unable to map source\n");
1478 return ERR_PTR(-ENOMEM);
1481 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1483 if (unlikely(!mapped_src_nents)) {
1484 dev_err(jrdev, "unable to map source\n");
1485 return ERR_PTR(-ENOMEM);
1488 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1490 if (unlikely(!mapped_dst_nents)) {
1491 dev_err(jrdev, "unable to map destination\n");
1492 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1493 return ERR_PTR(-ENOMEM);
1497 sec4_sg_ents = 1 + mapped_src_nents;
1498 dst_sg_idx = sec4_sg_ents;
1499 sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1500 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1503 * allocate space for base edesc and hw desc commands, link tables, IV
1505 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
1508 dev_err(jrdev, "could not allocate extended descriptor\n");
1509 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1511 return ERR_PTR(-ENOMEM);
1514 edesc->src_nents = src_nents;
1515 edesc->dst_nents = dst_nents;
1516 edesc->sec4_sg_bytes = sec4_sg_bytes;
1517 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1519 edesc->iv_dir = DMA_TO_DEVICE;
1521 /* Make sure IV is located in a DMAable area */
1522 iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
1523 memcpy(iv, req->info, ivsize);
1525 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
1526 if (dma_mapping_error(jrdev, iv_dma)) {
1527 dev_err(jrdev, "unable to map IV\n");
1528 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1531 return ERR_PTR(-ENOMEM);
1534 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1535 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg + 1, 0);
1537 if (mapped_dst_nents > 1) {
1538 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1539 edesc->sec4_sg + dst_sg_idx, 0);
1542 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1543 sec4_sg_bytes, DMA_TO_DEVICE);
1544 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1545 dev_err(jrdev, "unable to map S/G table\n");
1546 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1547 iv_dma, ivsize, DMA_TO_DEVICE, 0, 0);
1549 return ERR_PTR(-ENOMEM);
1552 edesc->iv_dma = iv_dma;
1555 print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
1556 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1563 static int ablkcipher_encrypt(struct ablkcipher_request *req)
1565 struct ablkcipher_edesc *edesc;
1566 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1567 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1568 struct device *jrdev = ctx->jrdev;
1572 /* allocate extended descriptor */
1573 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1575 return PTR_ERR(edesc);
1577 /* Create and submit job descriptor*/
1578 init_ablkcipher_job(ctx->sh_desc_enc, ctx->sh_desc_enc_dma, edesc, req);
1580 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
1581 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1582 desc_bytes(edesc->hw_desc), 1);
1584 desc = edesc->hw_desc;
1585 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1590 ablkcipher_unmap(jrdev, edesc, req);
1597 static int ablkcipher_decrypt(struct ablkcipher_request *req)
1599 struct ablkcipher_edesc *edesc;
1600 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1601 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1602 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1603 struct device *jrdev = ctx->jrdev;
1607 /* allocate extended descriptor */
1608 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1610 return PTR_ERR(edesc);
1613 * The crypto API expects us to set the IV (req->info) to the last
1614 * ciphertext block when running in CBC mode.
1616 if ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == OP_ALG_AAI_CBC)
1617 scatterwalk_map_and_copy(req->info, req->src, req->nbytes -
1620 /* Create and submit job descriptor*/
1621 init_ablkcipher_job(ctx->sh_desc_dec, ctx->sh_desc_dec_dma, edesc, req);
1622 desc = edesc->hw_desc;
1624 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
1625 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1626 desc_bytes(edesc->hw_desc), 1);
1629 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
1633 ablkcipher_unmap(jrdev, edesc, req);
1641 * allocate and map the ablkcipher extended descriptor
1642 * for ablkcipher givencrypt
1644 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
1645 struct skcipher_givcrypt_request *greq,
1648 struct ablkcipher_request *req = &greq->creq;
1649 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1650 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1651 struct device *jrdev = ctx->jrdev;
1652 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1653 GFP_KERNEL : GFP_ATOMIC;
1654 int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
1655 struct ablkcipher_edesc *edesc;
1658 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1659 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1661 src_nents = sg_nents_for_len(req->src, req->nbytes);
1662 if (unlikely(src_nents < 0)) {
1663 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1665 return ERR_PTR(src_nents);
1668 if (likely(req->src == req->dst)) {
1669 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1671 if (unlikely(!mapped_src_nents)) {
1672 dev_err(jrdev, "unable to map source\n");
1673 return ERR_PTR(-ENOMEM);
1676 dst_nents = src_nents;
1677 mapped_dst_nents = src_nents;
1679 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1681 if (unlikely(!mapped_src_nents)) {
1682 dev_err(jrdev, "unable to map source\n");
1683 return ERR_PTR(-ENOMEM);
1686 dst_nents = sg_nents_for_len(req->dst, req->nbytes);
1687 if (unlikely(dst_nents < 0)) {
1688 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1690 return ERR_PTR(dst_nents);
1693 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1695 if (unlikely(!mapped_dst_nents)) {
1696 dev_err(jrdev, "unable to map destination\n");
1697 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1698 return ERR_PTR(-ENOMEM);
1702 sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
1703 dst_sg_idx = sec4_sg_ents;
1704 sec4_sg_ents += 1 + mapped_dst_nents;
1707 * allocate space for base edesc and hw desc commands, link tables, IV
1709 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1710 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
1713 dev_err(jrdev, "could not allocate extended descriptor\n");
1714 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1716 return ERR_PTR(-ENOMEM);
1719 edesc->src_nents = src_nents;
1720 edesc->dst_nents = dst_nents;
1721 edesc->sec4_sg_bytes = sec4_sg_bytes;
1722 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1724 edesc->iv_dir = DMA_FROM_DEVICE;
1726 /* Make sure IV is located in a DMAable area */
1727 iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
1728 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_FROM_DEVICE);
1729 if (dma_mapping_error(jrdev, iv_dma)) {
1730 dev_err(jrdev, "unable to map IV\n");
1731 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1734 return ERR_PTR(-ENOMEM);
1737 if (mapped_src_nents > 1)
1738 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
1741 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx, iv_dma, ivsize, 0);
1742 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, edesc->sec4_sg +
1745 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1746 sec4_sg_bytes, DMA_TO_DEVICE);
1747 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1748 dev_err(jrdev, "unable to map S/G table\n");
1749 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1750 iv_dma, ivsize, DMA_FROM_DEVICE, 0, 0);
1752 return ERR_PTR(-ENOMEM);
1754 edesc->iv_dma = iv_dma;
1757 print_hex_dump(KERN_ERR,
1758 "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
1759 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1766 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
1768 struct ablkcipher_request *req = &creq->creq;
1769 struct ablkcipher_edesc *edesc;
1770 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1771 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1772 struct device *jrdev = ctx->jrdev;
1776 /* allocate extended descriptor */
1777 edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1779 return PTR_ERR(edesc);
1781 /* Create and submit job descriptor*/
1782 init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
1785 print_hex_dump(KERN_ERR,
1786 "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
1787 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1788 desc_bytes(edesc->hw_desc), 1);
1790 desc = edesc->hw_desc;
1791 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1796 ablkcipher_unmap(jrdev, edesc, req);
1803 #define template_aead template_u.aead
1804 #define template_ablkcipher template_u.ablkcipher
1805 struct caam_alg_template {
1806 char name[CRYPTO_MAX_ALG_NAME];
1807 char driver_name[CRYPTO_MAX_ALG_NAME];
1808 unsigned int blocksize;
1811 struct ablkcipher_alg ablkcipher;
1813 u32 class1_alg_type;
1814 u32 class2_alg_type;
1817 static struct caam_alg_template driver_algs[] = {
1818 /* ablkcipher descriptor */
1821 .driver_name = "cbc-aes-caam",
1822 .blocksize = AES_BLOCK_SIZE,
1823 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
1824 .template_ablkcipher = {
1825 .setkey = ablkcipher_setkey,
1826 .encrypt = ablkcipher_encrypt,
1827 .decrypt = ablkcipher_decrypt,
1828 .givencrypt = ablkcipher_givencrypt,
1829 .geniv = "<built-in>",
1830 .min_keysize = AES_MIN_KEY_SIZE,
1831 .max_keysize = AES_MAX_KEY_SIZE,
1832 .ivsize = AES_BLOCK_SIZE,
1834 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1837 .name = "cbc(des3_ede)",
1838 .driver_name = "cbc-3des-caam",
1839 .blocksize = DES3_EDE_BLOCK_SIZE,
1840 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
1841 .template_ablkcipher = {
1842 .setkey = ablkcipher_setkey,
1843 .encrypt = ablkcipher_encrypt,
1844 .decrypt = ablkcipher_decrypt,
1845 .givencrypt = ablkcipher_givencrypt,
1846 .geniv = "<built-in>",
1847 .min_keysize = DES3_EDE_KEY_SIZE,
1848 .max_keysize = DES3_EDE_KEY_SIZE,
1849 .ivsize = DES3_EDE_BLOCK_SIZE,
1851 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1855 .driver_name = "cbc-des-caam",
1856 .blocksize = DES_BLOCK_SIZE,
1857 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
1858 .template_ablkcipher = {
1859 .setkey = ablkcipher_setkey,
1860 .encrypt = ablkcipher_encrypt,
1861 .decrypt = ablkcipher_decrypt,
1862 .givencrypt = ablkcipher_givencrypt,
1863 .geniv = "<built-in>",
1864 .min_keysize = DES_KEY_SIZE,
1865 .max_keysize = DES_KEY_SIZE,
1866 .ivsize = DES_BLOCK_SIZE,
1868 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1872 .driver_name = "ctr-aes-caam",
1874 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1875 .template_ablkcipher = {
1876 .setkey = ablkcipher_setkey,
1877 .encrypt = ablkcipher_encrypt,
1878 .decrypt = ablkcipher_decrypt,
1880 .min_keysize = AES_MIN_KEY_SIZE,
1881 .max_keysize = AES_MAX_KEY_SIZE,
1882 .ivsize = AES_BLOCK_SIZE,
1884 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
1887 .name = "rfc3686(ctr(aes))",
1888 .driver_name = "rfc3686-ctr-aes-caam",
1890 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
1891 .template_ablkcipher = {
1892 .setkey = ablkcipher_setkey,
1893 .encrypt = ablkcipher_encrypt,
1894 .decrypt = ablkcipher_decrypt,
1895 .givencrypt = ablkcipher_givencrypt,
1896 .geniv = "<built-in>",
1897 .min_keysize = AES_MIN_KEY_SIZE +
1898 CTR_RFC3686_NONCE_SIZE,
1899 .max_keysize = AES_MAX_KEY_SIZE +
1900 CTR_RFC3686_NONCE_SIZE,
1901 .ivsize = CTR_RFC3686_IV_SIZE,
1903 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
1907 .driver_name = "xts-aes-caam",
1908 .blocksize = AES_BLOCK_SIZE,
1909 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1910 .template_ablkcipher = {
1911 .setkey = xts_ablkcipher_setkey,
1912 .encrypt = ablkcipher_encrypt,
1913 .decrypt = ablkcipher_decrypt,
1915 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1916 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1917 .ivsize = AES_BLOCK_SIZE,
1919 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1923 static struct caam_aead_alg driver_aeads[] = {
1927 .cra_name = "rfc4106(gcm(aes))",
1928 .cra_driver_name = "rfc4106-gcm-aes-caam",
1931 .setkey = rfc4106_setkey,
1932 .setauthsize = rfc4106_setauthsize,
1933 .encrypt = ipsec_gcm_encrypt,
1934 .decrypt = ipsec_gcm_decrypt,
1936 .maxauthsize = AES_BLOCK_SIZE,
1939 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1945 .cra_name = "rfc4543(gcm(aes))",
1946 .cra_driver_name = "rfc4543-gcm-aes-caam",
1949 .setkey = rfc4543_setkey,
1950 .setauthsize = rfc4543_setauthsize,
1951 .encrypt = ipsec_gcm_encrypt,
1952 .decrypt = ipsec_gcm_decrypt,
1954 .maxauthsize = AES_BLOCK_SIZE,
1957 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1960 /* Galois Counter Mode */
1964 .cra_name = "gcm(aes)",
1965 .cra_driver_name = "gcm-aes-caam",
1968 .setkey = gcm_setkey,
1969 .setauthsize = gcm_setauthsize,
1970 .encrypt = gcm_encrypt,
1971 .decrypt = gcm_decrypt,
1973 .maxauthsize = AES_BLOCK_SIZE,
1976 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1979 /* single-pass ipsec_esp descriptor */
1983 .cra_name = "authenc(hmac(md5),"
1984 "ecb(cipher_null))",
1985 .cra_driver_name = "authenc-hmac-md5-"
1986 "ecb-cipher_null-caam",
1987 .cra_blocksize = NULL_BLOCK_SIZE,
1989 .setkey = aead_setkey,
1990 .setauthsize = aead_setauthsize,
1991 .encrypt = aead_encrypt,
1992 .decrypt = aead_decrypt,
1993 .ivsize = NULL_IV_SIZE,
1994 .maxauthsize = MD5_DIGEST_SIZE,
1997 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
1998 OP_ALG_AAI_HMAC_PRECOMP,
2004 .cra_name = "authenc(hmac(sha1),"
2005 "ecb(cipher_null))",
2006 .cra_driver_name = "authenc-hmac-sha1-"
2007 "ecb-cipher_null-caam",
2008 .cra_blocksize = NULL_BLOCK_SIZE,
2010 .setkey = aead_setkey,
2011 .setauthsize = aead_setauthsize,
2012 .encrypt = aead_encrypt,
2013 .decrypt = aead_decrypt,
2014 .ivsize = NULL_IV_SIZE,
2015 .maxauthsize = SHA1_DIGEST_SIZE,
2018 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2019 OP_ALG_AAI_HMAC_PRECOMP,
2025 .cra_name = "authenc(hmac(sha224),"
2026 "ecb(cipher_null))",
2027 .cra_driver_name = "authenc-hmac-sha224-"
2028 "ecb-cipher_null-caam",
2029 .cra_blocksize = NULL_BLOCK_SIZE,
2031 .setkey = aead_setkey,
2032 .setauthsize = aead_setauthsize,
2033 .encrypt = aead_encrypt,
2034 .decrypt = aead_decrypt,
2035 .ivsize = NULL_IV_SIZE,
2036 .maxauthsize = SHA224_DIGEST_SIZE,
2039 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2040 OP_ALG_AAI_HMAC_PRECOMP,
2046 .cra_name = "authenc(hmac(sha256),"
2047 "ecb(cipher_null))",
2048 .cra_driver_name = "authenc-hmac-sha256-"
2049 "ecb-cipher_null-caam",
2050 .cra_blocksize = NULL_BLOCK_SIZE,
2052 .setkey = aead_setkey,
2053 .setauthsize = aead_setauthsize,
2054 .encrypt = aead_encrypt,
2055 .decrypt = aead_decrypt,
2056 .ivsize = NULL_IV_SIZE,
2057 .maxauthsize = SHA256_DIGEST_SIZE,
2060 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2061 OP_ALG_AAI_HMAC_PRECOMP,
2067 .cra_name = "authenc(hmac(sha384),"
2068 "ecb(cipher_null))",
2069 .cra_driver_name = "authenc-hmac-sha384-"
2070 "ecb-cipher_null-caam",
2071 .cra_blocksize = NULL_BLOCK_SIZE,
2073 .setkey = aead_setkey,
2074 .setauthsize = aead_setauthsize,
2075 .encrypt = aead_encrypt,
2076 .decrypt = aead_decrypt,
2077 .ivsize = NULL_IV_SIZE,
2078 .maxauthsize = SHA384_DIGEST_SIZE,
2081 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2082 OP_ALG_AAI_HMAC_PRECOMP,
2088 .cra_name = "authenc(hmac(sha512),"
2089 "ecb(cipher_null))",
2090 .cra_driver_name = "authenc-hmac-sha512-"
2091 "ecb-cipher_null-caam",
2092 .cra_blocksize = NULL_BLOCK_SIZE,
2094 .setkey = aead_setkey,
2095 .setauthsize = aead_setauthsize,
2096 .encrypt = aead_encrypt,
2097 .decrypt = aead_decrypt,
2098 .ivsize = NULL_IV_SIZE,
2099 .maxauthsize = SHA512_DIGEST_SIZE,
2102 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2103 OP_ALG_AAI_HMAC_PRECOMP,
2109 .cra_name = "authenc(hmac(md5),cbc(aes))",
2110 .cra_driver_name = "authenc-hmac-md5-"
2112 .cra_blocksize = AES_BLOCK_SIZE,
2114 .setkey = aead_setkey,
2115 .setauthsize = aead_setauthsize,
2116 .encrypt = aead_encrypt,
2117 .decrypt = aead_decrypt,
2118 .ivsize = AES_BLOCK_SIZE,
2119 .maxauthsize = MD5_DIGEST_SIZE,
2122 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2123 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2124 OP_ALG_AAI_HMAC_PRECOMP,
2130 .cra_name = "echainiv(authenc(hmac(md5),"
2132 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2134 .cra_blocksize = AES_BLOCK_SIZE,
2136 .setkey = aead_setkey,
2137 .setauthsize = aead_setauthsize,
2138 .encrypt = aead_encrypt,
2139 .decrypt = aead_decrypt,
2140 .ivsize = AES_BLOCK_SIZE,
2141 .maxauthsize = MD5_DIGEST_SIZE,
2144 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2145 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2146 OP_ALG_AAI_HMAC_PRECOMP,
2153 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2154 .cra_driver_name = "authenc-hmac-sha1-"
2156 .cra_blocksize = AES_BLOCK_SIZE,
2158 .setkey = aead_setkey,
2159 .setauthsize = aead_setauthsize,
2160 .encrypt = aead_encrypt,
2161 .decrypt = aead_decrypt,
2162 .ivsize = AES_BLOCK_SIZE,
2163 .maxauthsize = SHA1_DIGEST_SIZE,
2166 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2167 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2168 OP_ALG_AAI_HMAC_PRECOMP,
2174 .cra_name = "echainiv(authenc(hmac(sha1),"
2176 .cra_driver_name = "echainiv-authenc-"
2177 "hmac-sha1-cbc-aes-caam",
2178 .cra_blocksize = AES_BLOCK_SIZE,
2180 .setkey = aead_setkey,
2181 .setauthsize = aead_setauthsize,
2182 .encrypt = aead_encrypt,
2183 .decrypt = aead_decrypt,
2184 .ivsize = AES_BLOCK_SIZE,
2185 .maxauthsize = SHA1_DIGEST_SIZE,
2188 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2189 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2190 OP_ALG_AAI_HMAC_PRECOMP,
2197 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2198 .cra_driver_name = "authenc-hmac-sha224-"
2200 .cra_blocksize = AES_BLOCK_SIZE,
2202 .setkey = aead_setkey,
2203 .setauthsize = aead_setauthsize,
2204 .encrypt = aead_encrypt,
2205 .decrypt = aead_decrypt,
2206 .ivsize = AES_BLOCK_SIZE,
2207 .maxauthsize = SHA224_DIGEST_SIZE,
2210 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2211 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2212 OP_ALG_AAI_HMAC_PRECOMP,
2218 .cra_name = "echainiv(authenc(hmac(sha224),"
2220 .cra_driver_name = "echainiv-authenc-"
2221 "hmac-sha224-cbc-aes-caam",
2222 .cra_blocksize = AES_BLOCK_SIZE,
2224 .setkey = aead_setkey,
2225 .setauthsize = aead_setauthsize,
2226 .encrypt = aead_encrypt,
2227 .decrypt = aead_decrypt,
2228 .ivsize = AES_BLOCK_SIZE,
2229 .maxauthsize = SHA224_DIGEST_SIZE,
2232 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2233 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2234 OP_ALG_AAI_HMAC_PRECOMP,
2241 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2242 .cra_driver_name = "authenc-hmac-sha256-"
2244 .cra_blocksize = AES_BLOCK_SIZE,
2246 .setkey = aead_setkey,
2247 .setauthsize = aead_setauthsize,
2248 .encrypt = aead_encrypt,
2249 .decrypt = aead_decrypt,
2250 .ivsize = AES_BLOCK_SIZE,
2251 .maxauthsize = SHA256_DIGEST_SIZE,
2254 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2255 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2256 OP_ALG_AAI_HMAC_PRECOMP,
2262 .cra_name = "echainiv(authenc(hmac(sha256),"
2264 .cra_driver_name = "echainiv-authenc-"
2265 "hmac-sha256-cbc-aes-caam",
2266 .cra_blocksize = AES_BLOCK_SIZE,
2268 .setkey = aead_setkey,
2269 .setauthsize = aead_setauthsize,
2270 .encrypt = aead_encrypt,
2271 .decrypt = aead_decrypt,
2272 .ivsize = AES_BLOCK_SIZE,
2273 .maxauthsize = SHA256_DIGEST_SIZE,
2276 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2277 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2278 OP_ALG_AAI_HMAC_PRECOMP,
2285 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2286 .cra_driver_name = "authenc-hmac-sha384-"
2288 .cra_blocksize = AES_BLOCK_SIZE,
2290 .setkey = aead_setkey,
2291 .setauthsize = aead_setauthsize,
2292 .encrypt = aead_encrypt,
2293 .decrypt = aead_decrypt,
2294 .ivsize = AES_BLOCK_SIZE,
2295 .maxauthsize = SHA384_DIGEST_SIZE,
2298 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2299 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2300 OP_ALG_AAI_HMAC_PRECOMP,
2306 .cra_name = "echainiv(authenc(hmac(sha384),"
2308 .cra_driver_name = "echainiv-authenc-"
2309 "hmac-sha384-cbc-aes-caam",
2310 .cra_blocksize = AES_BLOCK_SIZE,
2312 .setkey = aead_setkey,
2313 .setauthsize = aead_setauthsize,
2314 .encrypt = aead_encrypt,
2315 .decrypt = aead_decrypt,
2316 .ivsize = AES_BLOCK_SIZE,
2317 .maxauthsize = SHA384_DIGEST_SIZE,
2320 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2321 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2322 OP_ALG_AAI_HMAC_PRECOMP,
2329 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2330 .cra_driver_name = "authenc-hmac-sha512-"
2332 .cra_blocksize = AES_BLOCK_SIZE,
2334 .setkey = aead_setkey,
2335 .setauthsize = aead_setauthsize,
2336 .encrypt = aead_encrypt,
2337 .decrypt = aead_decrypt,
2338 .ivsize = AES_BLOCK_SIZE,
2339 .maxauthsize = SHA512_DIGEST_SIZE,
2342 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2343 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2344 OP_ALG_AAI_HMAC_PRECOMP,
2350 .cra_name = "echainiv(authenc(hmac(sha512),"
2352 .cra_driver_name = "echainiv-authenc-"
2353 "hmac-sha512-cbc-aes-caam",
2354 .cra_blocksize = AES_BLOCK_SIZE,
2356 .setkey = aead_setkey,
2357 .setauthsize = aead_setauthsize,
2358 .encrypt = aead_encrypt,
2359 .decrypt = aead_decrypt,
2360 .ivsize = AES_BLOCK_SIZE,
2361 .maxauthsize = SHA512_DIGEST_SIZE,
2364 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2365 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2366 OP_ALG_AAI_HMAC_PRECOMP,
2373 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2374 .cra_driver_name = "authenc-hmac-md5-"
2375 "cbc-des3_ede-caam",
2376 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2378 .setkey = aead_setkey,
2379 .setauthsize = aead_setauthsize,
2380 .encrypt = aead_encrypt,
2381 .decrypt = aead_decrypt,
2382 .ivsize = DES3_EDE_BLOCK_SIZE,
2383 .maxauthsize = MD5_DIGEST_SIZE,
2386 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2387 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2388 OP_ALG_AAI_HMAC_PRECOMP,
2394 .cra_name = "echainiv(authenc(hmac(md5),"
2396 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2397 "cbc-des3_ede-caam",
2398 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2400 .setkey = aead_setkey,
2401 .setauthsize = aead_setauthsize,
2402 .encrypt = aead_encrypt,
2403 .decrypt = aead_decrypt,
2404 .ivsize = DES3_EDE_BLOCK_SIZE,
2405 .maxauthsize = MD5_DIGEST_SIZE,
2408 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2409 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2410 OP_ALG_AAI_HMAC_PRECOMP,
2417 .cra_name = "authenc(hmac(sha1),"
2419 .cra_driver_name = "authenc-hmac-sha1-"
2420 "cbc-des3_ede-caam",
2421 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2423 .setkey = aead_setkey,
2424 .setauthsize = aead_setauthsize,
2425 .encrypt = aead_encrypt,
2426 .decrypt = aead_decrypt,
2427 .ivsize = DES3_EDE_BLOCK_SIZE,
2428 .maxauthsize = SHA1_DIGEST_SIZE,
2431 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2432 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2433 OP_ALG_AAI_HMAC_PRECOMP,
2439 .cra_name = "echainiv(authenc(hmac(sha1),"
2441 .cra_driver_name = "echainiv-authenc-"
2443 "cbc-des3_ede-caam",
2444 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2446 .setkey = aead_setkey,
2447 .setauthsize = aead_setauthsize,
2448 .encrypt = aead_encrypt,
2449 .decrypt = aead_decrypt,
2450 .ivsize = DES3_EDE_BLOCK_SIZE,
2451 .maxauthsize = SHA1_DIGEST_SIZE,
2454 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2455 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2456 OP_ALG_AAI_HMAC_PRECOMP,
2463 .cra_name = "authenc(hmac(sha224),"
2465 .cra_driver_name = "authenc-hmac-sha224-"
2466 "cbc-des3_ede-caam",
2467 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2469 .setkey = aead_setkey,
2470 .setauthsize = aead_setauthsize,
2471 .encrypt = aead_encrypt,
2472 .decrypt = aead_decrypt,
2473 .ivsize = DES3_EDE_BLOCK_SIZE,
2474 .maxauthsize = SHA224_DIGEST_SIZE,
2477 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2478 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2479 OP_ALG_AAI_HMAC_PRECOMP,
2485 .cra_name = "echainiv(authenc(hmac(sha224),"
2487 .cra_driver_name = "echainiv-authenc-"
2489 "cbc-des3_ede-caam",
2490 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2492 .setkey = aead_setkey,
2493 .setauthsize = aead_setauthsize,
2494 .encrypt = aead_encrypt,
2495 .decrypt = aead_decrypt,
2496 .ivsize = DES3_EDE_BLOCK_SIZE,
2497 .maxauthsize = SHA224_DIGEST_SIZE,
2500 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2501 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2502 OP_ALG_AAI_HMAC_PRECOMP,
2509 .cra_name = "authenc(hmac(sha256),"
2511 .cra_driver_name = "authenc-hmac-sha256-"
2512 "cbc-des3_ede-caam",
2513 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2515 .setkey = aead_setkey,
2516 .setauthsize = aead_setauthsize,
2517 .encrypt = aead_encrypt,
2518 .decrypt = aead_decrypt,
2519 .ivsize = DES3_EDE_BLOCK_SIZE,
2520 .maxauthsize = SHA256_DIGEST_SIZE,
2523 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2524 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2525 OP_ALG_AAI_HMAC_PRECOMP,
2531 .cra_name = "echainiv(authenc(hmac(sha256),"
2533 .cra_driver_name = "echainiv-authenc-"
2535 "cbc-des3_ede-caam",
2536 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2538 .setkey = aead_setkey,
2539 .setauthsize = aead_setauthsize,
2540 .encrypt = aead_encrypt,
2541 .decrypt = aead_decrypt,
2542 .ivsize = DES3_EDE_BLOCK_SIZE,
2543 .maxauthsize = SHA256_DIGEST_SIZE,
2546 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2547 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2548 OP_ALG_AAI_HMAC_PRECOMP,
2555 .cra_name = "authenc(hmac(sha384),"
2557 .cra_driver_name = "authenc-hmac-sha384-"
2558 "cbc-des3_ede-caam",
2559 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2561 .setkey = aead_setkey,
2562 .setauthsize = aead_setauthsize,
2563 .encrypt = aead_encrypt,
2564 .decrypt = aead_decrypt,
2565 .ivsize = DES3_EDE_BLOCK_SIZE,
2566 .maxauthsize = SHA384_DIGEST_SIZE,
2569 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2570 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2571 OP_ALG_AAI_HMAC_PRECOMP,
2577 .cra_name = "echainiv(authenc(hmac(sha384),"
2579 .cra_driver_name = "echainiv-authenc-"
2581 "cbc-des3_ede-caam",
2582 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2584 .setkey = aead_setkey,
2585 .setauthsize = aead_setauthsize,
2586 .encrypt = aead_encrypt,
2587 .decrypt = aead_decrypt,
2588 .ivsize = DES3_EDE_BLOCK_SIZE,
2589 .maxauthsize = SHA384_DIGEST_SIZE,
2592 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2593 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2594 OP_ALG_AAI_HMAC_PRECOMP,
2601 .cra_name = "authenc(hmac(sha512),"
2603 .cra_driver_name = "authenc-hmac-sha512-"
2604 "cbc-des3_ede-caam",
2605 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2607 .setkey = aead_setkey,
2608 .setauthsize = aead_setauthsize,
2609 .encrypt = aead_encrypt,
2610 .decrypt = aead_decrypt,
2611 .ivsize = DES3_EDE_BLOCK_SIZE,
2612 .maxauthsize = SHA512_DIGEST_SIZE,
2615 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2616 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2617 OP_ALG_AAI_HMAC_PRECOMP,
2623 .cra_name = "echainiv(authenc(hmac(sha512),"
2625 .cra_driver_name = "echainiv-authenc-"
2627 "cbc-des3_ede-caam",
2628 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2630 .setkey = aead_setkey,
2631 .setauthsize = aead_setauthsize,
2632 .encrypt = aead_encrypt,
2633 .decrypt = aead_decrypt,
2634 .ivsize = DES3_EDE_BLOCK_SIZE,
2635 .maxauthsize = SHA512_DIGEST_SIZE,
2638 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2639 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2640 OP_ALG_AAI_HMAC_PRECOMP,
2647 .cra_name = "authenc(hmac(md5),cbc(des))",
2648 .cra_driver_name = "authenc-hmac-md5-"
2650 .cra_blocksize = DES_BLOCK_SIZE,
2652 .setkey = aead_setkey,
2653 .setauthsize = aead_setauthsize,
2654 .encrypt = aead_encrypt,
2655 .decrypt = aead_decrypt,
2656 .ivsize = DES_BLOCK_SIZE,
2657 .maxauthsize = MD5_DIGEST_SIZE,
2660 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2661 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2662 OP_ALG_AAI_HMAC_PRECOMP,
2668 .cra_name = "echainiv(authenc(hmac(md5),"
2670 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2672 .cra_blocksize = DES_BLOCK_SIZE,
2674 .setkey = aead_setkey,
2675 .setauthsize = aead_setauthsize,
2676 .encrypt = aead_encrypt,
2677 .decrypt = aead_decrypt,
2678 .ivsize = DES_BLOCK_SIZE,
2679 .maxauthsize = MD5_DIGEST_SIZE,
2682 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2683 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2684 OP_ALG_AAI_HMAC_PRECOMP,
2691 .cra_name = "authenc(hmac(sha1),cbc(des))",
2692 .cra_driver_name = "authenc-hmac-sha1-"
2694 .cra_blocksize = DES_BLOCK_SIZE,
2696 .setkey = aead_setkey,
2697 .setauthsize = aead_setauthsize,
2698 .encrypt = aead_encrypt,
2699 .decrypt = aead_decrypt,
2700 .ivsize = DES_BLOCK_SIZE,
2701 .maxauthsize = SHA1_DIGEST_SIZE,
2704 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2705 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2706 OP_ALG_AAI_HMAC_PRECOMP,
2712 .cra_name = "echainiv(authenc(hmac(sha1),"
2714 .cra_driver_name = "echainiv-authenc-"
2715 "hmac-sha1-cbc-des-caam",
2716 .cra_blocksize = DES_BLOCK_SIZE,
2718 .setkey = aead_setkey,
2719 .setauthsize = aead_setauthsize,
2720 .encrypt = aead_encrypt,
2721 .decrypt = aead_decrypt,
2722 .ivsize = DES_BLOCK_SIZE,
2723 .maxauthsize = SHA1_DIGEST_SIZE,
2726 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2727 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2728 OP_ALG_AAI_HMAC_PRECOMP,
2735 .cra_name = "authenc(hmac(sha224),cbc(des))",
2736 .cra_driver_name = "authenc-hmac-sha224-"
2738 .cra_blocksize = DES_BLOCK_SIZE,
2740 .setkey = aead_setkey,
2741 .setauthsize = aead_setauthsize,
2742 .encrypt = aead_encrypt,
2743 .decrypt = aead_decrypt,
2744 .ivsize = DES_BLOCK_SIZE,
2745 .maxauthsize = SHA224_DIGEST_SIZE,
2748 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2749 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2750 OP_ALG_AAI_HMAC_PRECOMP,
2756 .cra_name = "echainiv(authenc(hmac(sha224),"
2758 .cra_driver_name = "echainiv-authenc-"
2759 "hmac-sha224-cbc-des-caam",
2760 .cra_blocksize = DES_BLOCK_SIZE,
2762 .setkey = aead_setkey,
2763 .setauthsize = aead_setauthsize,
2764 .encrypt = aead_encrypt,
2765 .decrypt = aead_decrypt,
2766 .ivsize = DES_BLOCK_SIZE,
2767 .maxauthsize = SHA224_DIGEST_SIZE,
2770 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2771 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2772 OP_ALG_AAI_HMAC_PRECOMP,
2779 .cra_name = "authenc(hmac(sha256),cbc(des))",
2780 .cra_driver_name = "authenc-hmac-sha256-"
2782 .cra_blocksize = DES_BLOCK_SIZE,
2784 .setkey = aead_setkey,
2785 .setauthsize = aead_setauthsize,
2786 .encrypt = aead_encrypt,
2787 .decrypt = aead_decrypt,
2788 .ivsize = DES_BLOCK_SIZE,
2789 .maxauthsize = SHA256_DIGEST_SIZE,
2792 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2793 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2794 OP_ALG_AAI_HMAC_PRECOMP,
2800 .cra_name = "echainiv(authenc(hmac(sha256),"
2802 .cra_driver_name = "echainiv-authenc-"
2803 "hmac-sha256-cbc-des-caam",
2804 .cra_blocksize = DES_BLOCK_SIZE,
2806 .setkey = aead_setkey,
2807 .setauthsize = aead_setauthsize,
2808 .encrypt = aead_encrypt,
2809 .decrypt = aead_decrypt,
2810 .ivsize = DES_BLOCK_SIZE,
2811 .maxauthsize = SHA256_DIGEST_SIZE,
2814 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2815 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2816 OP_ALG_AAI_HMAC_PRECOMP,
2823 .cra_name = "authenc(hmac(sha384),cbc(des))",
2824 .cra_driver_name = "authenc-hmac-sha384-"
2826 .cra_blocksize = DES_BLOCK_SIZE,
2828 .setkey = aead_setkey,
2829 .setauthsize = aead_setauthsize,
2830 .encrypt = aead_encrypt,
2831 .decrypt = aead_decrypt,
2832 .ivsize = DES_BLOCK_SIZE,
2833 .maxauthsize = SHA384_DIGEST_SIZE,
2836 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2837 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2838 OP_ALG_AAI_HMAC_PRECOMP,
2844 .cra_name = "echainiv(authenc(hmac(sha384),"
2846 .cra_driver_name = "echainiv-authenc-"
2847 "hmac-sha384-cbc-des-caam",
2848 .cra_blocksize = DES_BLOCK_SIZE,
2850 .setkey = aead_setkey,
2851 .setauthsize = aead_setauthsize,
2852 .encrypt = aead_encrypt,
2853 .decrypt = aead_decrypt,
2854 .ivsize = DES_BLOCK_SIZE,
2855 .maxauthsize = SHA384_DIGEST_SIZE,
2858 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2859 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2860 OP_ALG_AAI_HMAC_PRECOMP,
2867 .cra_name = "authenc(hmac(sha512),cbc(des))",
2868 .cra_driver_name = "authenc-hmac-sha512-"
2870 .cra_blocksize = DES_BLOCK_SIZE,
2872 .setkey = aead_setkey,
2873 .setauthsize = aead_setauthsize,
2874 .encrypt = aead_encrypt,
2875 .decrypt = aead_decrypt,
2876 .ivsize = DES_BLOCK_SIZE,
2877 .maxauthsize = SHA512_DIGEST_SIZE,
2880 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2881 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2882 OP_ALG_AAI_HMAC_PRECOMP,
2888 .cra_name = "echainiv(authenc(hmac(sha512),"
2890 .cra_driver_name = "echainiv-authenc-"
2891 "hmac-sha512-cbc-des-caam",
2892 .cra_blocksize = DES_BLOCK_SIZE,
2894 .setkey = aead_setkey,
2895 .setauthsize = aead_setauthsize,
2896 .encrypt = aead_encrypt,
2897 .decrypt = aead_decrypt,
2898 .ivsize = DES_BLOCK_SIZE,
2899 .maxauthsize = SHA512_DIGEST_SIZE,
2902 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2903 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2904 OP_ALG_AAI_HMAC_PRECOMP,
2911 .cra_name = "authenc(hmac(md5),"
2912 "rfc3686(ctr(aes)))",
2913 .cra_driver_name = "authenc-hmac-md5-"
2914 "rfc3686-ctr-aes-caam",
2917 .setkey = aead_setkey,
2918 .setauthsize = aead_setauthsize,
2919 .encrypt = aead_encrypt,
2920 .decrypt = aead_decrypt,
2921 .ivsize = CTR_RFC3686_IV_SIZE,
2922 .maxauthsize = MD5_DIGEST_SIZE,
2925 .class1_alg_type = OP_ALG_ALGSEL_AES |
2926 OP_ALG_AAI_CTR_MOD128,
2927 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2928 OP_ALG_AAI_HMAC_PRECOMP,
2935 .cra_name = "seqiv(authenc("
2936 "hmac(md5),rfc3686(ctr(aes))))",
2937 .cra_driver_name = "seqiv-authenc-hmac-md5-"
2938 "rfc3686-ctr-aes-caam",
2941 .setkey = aead_setkey,
2942 .setauthsize = aead_setauthsize,
2943 .encrypt = aead_encrypt,
2944 .decrypt = aead_decrypt,
2945 .ivsize = CTR_RFC3686_IV_SIZE,
2946 .maxauthsize = MD5_DIGEST_SIZE,
2949 .class1_alg_type = OP_ALG_ALGSEL_AES |
2950 OP_ALG_AAI_CTR_MOD128,
2951 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2952 OP_ALG_AAI_HMAC_PRECOMP,
2960 .cra_name = "authenc(hmac(sha1),"
2961 "rfc3686(ctr(aes)))",
2962 .cra_driver_name = "authenc-hmac-sha1-"
2963 "rfc3686-ctr-aes-caam",
2966 .setkey = aead_setkey,
2967 .setauthsize = aead_setauthsize,
2968 .encrypt = aead_encrypt,
2969 .decrypt = aead_decrypt,
2970 .ivsize = CTR_RFC3686_IV_SIZE,
2971 .maxauthsize = SHA1_DIGEST_SIZE,
2974 .class1_alg_type = OP_ALG_ALGSEL_AES |
2975 OP_ALG_AAI_CTR_MOD128,
2976 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2977 OP_ALG_AAI_HMAC_PRECOMP,
2984 .cra_name = "seqiv(authenc("
2985 "hmac(sha1),rfc3686(ctr(aes))))",
2986 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
2987 "rfc3686-ctr-aes-caam",
2990 .setkey = aead_setkey,
2991 .setauthsize = aead_setauthsize,
2992 .encrypt = aead_encrypt,
2993 .decrypt = aead_decrypt,
2994 .ivsize = CTR_RFC3686_IV_SIZE,
2995 .maxauthsize = SHA1_DIGEST_SIZE,
2998 .class1_alg_type = OP_ALG_ALGSEL_AES |
2999 OP_ALG_AAI_CTR_MOD128,
3000 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3001 OP_ALG_AAI_HMAC_PRECOMP,
3009 .cra_name = "authenc(hmac(sha224),"
3010 "rfc3686(ctr(aes)))",
3011 .cra_driver_name = "authenc-hmac-sha224-"
3012 "rfc3686-ctr-aes-caam",
3015 .setkey = aead_setkey,
3016 .setauthsize = aead_setauthsize,
3017 .encrypt = aead_encrypt,
3018 .decrypt = aead_decrypt,
3019 .ivsize = CTR_RFC3686_IV_SIZE,
3020 .maxauthsize = SHA224_DIGEST_SIZE,
3023 .class1_alg_type = OP_ALG_ALGSEL_AES |
3024 OP_ALG_AAI_CTR_MOD128,
3025 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3026 OP_ALG_AAI_HMAC_PRECOMP,
3033 .cra_name = "seqiv(authenc("
3034 "hmac(sha224),rfc3686(ctr(aes))))",
3035 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3036 "rfc3686-ctr-aes-caam",
3039 .setkey = aead_setkey,
3040 .setauthsize = aead_setauthsize,
3041 .encrypt = aead_encrypt,
3042 .decrypt = aead_decrypt,
3043 .ivsize = CTR_RFC3686_IV_SIZE,
3044 .maxauthsize = SHA224_DIGEST_SIZE,
3047 .class1_alg_type = OP_ALG_ALGSEL_AES |
3048 OP_ALG_AAI_CTR_MOD128,
3049 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3050 OP_ALG_AAI_HMAC_PRECOMP,
3058 .cra_name = "authenc(hmac(sha256),"
3059 "rfc3686(ctr(aes)))",
3060 .cra_driver_name = "authenc-hmac-sha256-"
3061 "rfc3686-ctr-aes-caam",
3064 .setkey = aead_setkey,
3065 .setauthsize = aead_setauthsize,
3066 .encrypt = aead_encrypt,
3067 .decrypt = aead_decrypt,
3068 .ivsize = CTR_RFC3686_IV_SIZE,
3069 .maxauthsize = SHA256_DIGEST_SIZE,
3072 .class1_alg_type = OP_ALG_ALGSEL_AES |
3073 OP_ALG_AAI_CTR_MOD128,
3074 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3075 OP_ALG_AAI_HMAC_PRECOMP,
3082 .cra_name = "seqiv(authenc(hmac(sha256),"
3083 "rfc3686(ctr(aes))))",
3084 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3085 "rfc3686-ctr-aes-caam",
3088 .setkey = aead_setkey,
3089 .setauthsize = aead_setauthsize,
3090 .encrypt = aead_encrypt,
3091 .decrypt = aead_decrypt,
3092 .ivsize = CTR_RFC3686_IV_SIZE,
3093 .maxauthsize = SHA256_DIGEST_SIZE,
3096 .class1_alg_type = OP_ALG_ALGSEL_AES |
3097 OP_ALG_AAI_CTR_MOD128,
3098 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3099 OP_ALG_AAI_HMAC_PRECOMP,
3107 .cra_name = "authenc(hmac(sha384),"
3108 "rfc3686(ctr(aes)))",
3109 .cra_driver_name = "authenc-hmac-sha384-"
3110 "rfc3686-ctr-aes-caam",
3113 .setkey = aead_setkey,
3114 .setauthsize = aead_setauthsize,
3115 .encrypt = aead_encrypt,
3116 .decrypt = aead_decrypt,
3117 .ivsize = CTR_RFC3686_IV_SIZE,
3118 .maxauthsize = SHA384_DIGEST_SIZE,
3121 .class1_alg_type = OP_ALG_ALGSEL_AES |
3122 OP_ALG_AAI_CTR_MOD128,
3123 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3124 OP_ALG_AAI_HMAC_PRECOMP,
3131 .cra_name = "seqiv(authenc(hmac(sha384),"
3132 "rfc3686(ctr(aes))))",
3133 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3134 "rfc3686-ctr-aes-caam",
3137 .setkey = aead_setkey,
3138 .setauthsize = aead_setauthsize,
3139 .encrypt = aead_encrypt,
3140 .decrypt = aead_decrypt,
3141 .ivsize = CTR_RFC3686_IV_SIZE,
3142 .maxauthsize = SHA384_DIGEST_SIZE,
3145 .class1_alg_type = OP_ALG_ALGSEL_AES |
3146 OP_ALG_AAI_CTR_MOD128,
3147 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3148 OP_ALG_AAI_HMAC_PRECOMP,
3156 .cra_name = "authenc(hmac(sha512),"
3157 "rfc3686(ctr(aes)))",
3158 .cra_driver_name = "authenc-hmac-sha512-"
3159 "rfc3686-ctr-aes-caam",
3162 .setkey = aead_setkey,
3163 .setauthsize = aead_setauthsize,
3164 .encrypt = aead_encrypt,
3165 .decrypt = aead_decrypt,
3166 .ivsize = CTR_RFC3686_IV_SIZE,
3167 .maxauthsize = SHA512_DIGEST_SIZE,
3170 .class1_alg_type = OP_ALG_ALGSEL_AES |
3171 OP_ALG_AAI_CTR_MOD128,
3172 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3173 OP_ALG_AAI_HMAC_PRECOMP,
3180 .cra_name = "seqiv(authenc(hmac(sha512),"
3181 "rfc3686(ctr(aes))))",
3182 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3183 "rfc3686-ctr-aes-caam",
3186 .setkey = aead_setkey,
3187 .setauthsize = aead_setauthsize,
3188 .encrypt = aead_encrypt,
3189 .decrypt = aead_decrypt,
3190 .ivsize = CTR_RFC3686_IV_SIZE,
3191 .maxauthsize = SHA512_DIGEST_SIZE,
3194 .class1_alg_type = OP_ALG_ALGSEL_AES |
3195 OP_ALG_AAI_CTR_MOD128,
3196 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3197 OP_ALG_AAI_HMAC_PRECOMP,
/*
 * caam_crypto_alg - one registered (abl)kcipher algorithm instance:
 * the crypto API descriptor plus the CAAM-specific template info
 * needed to build shared descriptors for this transform type.
 */
3204 struct caam_crypto_alg {
3205 struct crypto_alg crypto_alg; /* crypto API registration record */
3206 struct list_head entry; /* node in the module-global alg_list */
3207 struct caam_alg_entry caam; /* CAAM class1/class2 algorithm selectors */
/*
 * caam_init_common - per-transform context setup shared by the
 * blkcipher (caam_cra_init) and AEAD (caam_aead_init) init paths.
 *
 * Allocates a job ring for the transform, DMA-maps the context's
 * shared-descriptor/key region in a single mapping, records the
 * per-field DMA addresses, and copies the algorithm-type template
 * values from @caam into the context.
 *
 * Returns 0 on success or a negative errno (job-ring allocation or
 * DMA-mapping failure).
 */
3210 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
3212 dma_addr_t dma_addr;
3214 ctx->jrdev = caam_jr_alloc();
3215 if (IS_ERR(ctx->jrdev)) {
3216 pr_err("Job Ring Device allocation for transform failed\n");
3217 return PTR_ERR(ctx->jrdev);
/*
 * Map sh_desc_enc (first field of the region) through to the end of
 * the descriptor/key area in one mapping; DMA_ATTR_SKIP_CPU_SYNC
 * because the CPU syncs explicitly when descriptors are (re)written.
 */
3220 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3221 offsetof(struct caam_ctx,
3223 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
3224 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3225 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3226 caam_jr_free(ctx->jrdev);
/* Derive each sub-buffer's bus address from the single base mapping. */
3230 ctx->sh_desc_enc_dma = dma_addr;
3231 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3233 ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
3235 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3237 /* copy descriptor header template value */
3238 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3239 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
/*
 * caam_cra_init - crypto_alg .cra_init hook: recover the enclosing
 * caam_crypto_alg from the generic crypto_alg and run the common
 * context initialization with its CAAM template entry.
 */
3244 static int caam_cra_init(struct crypto_tfm *tfm)
3246 struct crypto_alg *alg = tfm->__crt_alg;
3247 struct caam_crypto_alg *caam_alg =
3248 container_of(alg, struct caam_crypto_alg, crypto_alg);
3249 struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
3251 return caam_init_common(ctx, &caam_alg->caam);
/*
 * caam_aead_init - aead_alg .init hook: AEAD counterpart of
 * caam_cra_init; locates the caam_aead_alg wrapper and performs the
 * shared context setup.
 */
3254 static int caam_aead_init(struct crypto_aead *tfm)
3256 struct aead_alg *alg = crypto_aead_alg(tfm);
3257 struct caam_aead_alg *caam_alg =
3258 container_of(alg, struct caam_aead_alg, aead);
3259 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3261 return caam_init_common(ctx, &caam_alg->caam);
/*
 * caam_exit_common - teardown mirror of caam_init_common: unmap the
 * descriptor/key region (same length and attrs as the original
 * mapping) and release the job ring.
 */
3264 static void caam_exit_common(struct caam_ctx *ctx)
3266 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3267 offsetof(struct caam_ctx, sh_desc_enc_dma),
3268 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
3269 caam_jr_free(ctx->jrdev);
/* crypto_alg .cra_exit hook: thin wrapper over caam_exit_common. */
3272 static void caam_cra_exit(struct crypto_tfm *tfm)
3274 caam_exit_common(crypto_tfm_ctx(tfm));
/* aead_alg .exit hook: thin wrapper over caam_exit_common. */
3277 static void caam_aead_exit(struct crypto_aead *tfm)
3279 caam_exit_common(crypto_aead_ctx(tfm));
/*
 * caam_algapi_exit - module unload: unregister every AEAD that was
 * successfully registered at init time (tracked via ->registered),
 * then unregister and unlink each dynamically allocated crypto_alg
 * on alg_list.
 */
3282 static void __exit caam_algapi_exit(void)
3285 struct caam_crypto_alg *t_alg, *n;
/* AEADs live in the static driver_aeads[] table; only drop registered ones. */
3288 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3289 struct caam_aead_alg *t_alg = driver_aeads + i;
3291 if (t_alg->registered)
3292 crypto_unregister_aead(&t_alg->aead);
/* (abl)kcipher algs were kzalloc'ed and chained on alg_list at init. */
3298 list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
3299 crypto_unregister_alg(&t_alg->crypto_alg);
3300 list_del(&t_alg->entry);
/*
 * caam_alg_alloc - build a caam_crypto_alg from a driver template.
 *
 * Allocates the wrapper, fills in the generic crypto_alg fields
 * (names, module, init/exit hooks, priority, blocksize, ctxsize,
 * flags), selects the crypto type/ops union member according to the
 * template type, and copies the CAAM class1/class2 selectors.
 *
 * Returns the new object or ERR_PTR(-ENOMEM); caller owns the
 * allocation (freed on the caam_algapi_exit path after list_del).
 */
3305 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
3308 struct caam_crypto_alg *t_alg;
3309 struct crypto_alg *alg;
3311 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
3313 pr_err("failed to allocate t_alg\n");
3314 return ERR_PTR(-ENOMEM);
3317 alg = &t_alg->crypto_alg;
/* Copy template names into the fixed-size crypto API name fields. */
3319 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
3320 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
3321 template->driver_name);
3322 alg->cra_module = THIS_MODULE;
3323 alg->cra_init = caam_cra_init;
3324 alg->cra_exit = caam_cra_exit;
3325 alg->cra_priority = CAAM_CRA_PRIORITY;
3326 alg->cra_blocksize = template->blocksize;
3327 alg->cra_alignmask = 0;
3328 alg->cra_ctxsize = sizeof(struct caam_ctx);
3329 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
/* Both cases share the ablkcipher template; only cra_type differs. */
3331 switch (template->type) {
3332 case CRYPTO_ALG_TYPE_GIVCIPHER:
3333 alg->cra_type = &crypto_givcipher_type;
3334 alg->cra_ablkcipher = template->template_ablkcipher;
3336 case CRYPTO_ALG_TYPE_ABLKCIPHER:
3337 alg->cra_type = &crypto_ablkcipher_type;
3338 alg->cra_ablkcipher = template->template_ablkcipher;
3342 t_alg->caam.class1_alg_type = template->class1_alg_type;
3343 t_alg->caam.class2_alg_type = template->class2_alg_type;
/*
 * caam_aead_alg_init - fill in the generic fields of a driver_aeads[]
 * entry (module, priority, ctxsize, flags, init/exit hooks) before it
 * is registered; the per-algorithm fields come from the static table.
 */
3348 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3350 struct aead_alg *alg = &t_alg->aead;
3352 alg->base.cra_module = THIS_MODULE;
3353 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3354 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3355 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3357 alg->init = caam_aead_init;
3358 alg->exit = caam_aead_exit;
/*
 * caam_algapi_init - module init: locate the CAAM controller via the
 * device tree, read which crypto hardware accelerators (CHAs) are
 * instantiated, and register only the algorithms the hardware can
 * actually run.
 */
3361 static int __init caam_algapi_init(void)
3363 struct device_node *dev_node;
3364 struct platform_device *pdev;
3365 struct device *ctrldev;
3366 struct caam_drv_private *priv;
3368 u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
3369 unsigned int md_limit = SHA512_DIGEST_SIZE;
3370 bool registered = false;
/* Try the current compatible string first, then the legacy spelling. */
3372 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3374 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3379 pdev = of_find_device_by_node(dev_node);
3381 of_node_put(dev_node);
3385 ctrldev = &pdev->dev;
3386 priv = dev_get_drvdata(ctrldev);
3387 of_node_put(dev_node);
3390 * If priv is NULL, it's probably because the caam driver wasn't
3391 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3397 INIT_LIST_HEAD(&alg_list);
3400 * Register crypto algorithms the device supports.
3401 * First, detect presence and attributes of DES, AES, and MD blocks.
3403 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3404 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3405 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
3406 aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
3407 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3409 /* If MD is present, limit digest size based on LP256 */
3410 if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
3411 md_limit = SHA256_DIGEST_SIZE;
/* Pass 1: (abl)kcipher templates - allocate, register, track on alg_list. */
3413 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3414 struct caam_crypto_alg *t_alg;
3415 struct caam_alg_template *alg = driver_algs + i;
3416 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;
3418 /* Skip DES algorithms if not supported by device */
3420 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3421 (alg_sel == OP_ALG_ALGSEL_DES)))
3424 /* Skip AES algorithms if not supported by device */
3425 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3429 * Check support for AES modes not available
3432 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3433 if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
3437 t_alg = caam_alg_alloc(alg);
3438 if (IS_ERR(t_alg)) {
3439 err = PTR_ERR(t_alg);
3440 pr_warn("%s alg allocation failed\n", alg->driver_name);
3444 err = crypto_register_alg(&t_alg->crypto_alg);
3446 pr_warn("%s alg registration failed\n",
3447 t_alg->crypto_alg.cra_driver_name);
3452 list_add_tail(&t_alg->entry, &alg_list);
/* Pass 2: AEADs from the static driver_aeads[] table. */
3456 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3457 struct caam_aead_alg *t_alg = driver_aeads + i;
3458 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3460 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3462 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3464 /* Skip DES algorithms if not supported by device */
3466 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3467 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3470 /* Skip AES algorithms if not supported by device */
3471 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3475 * Check support for AES algorithms not available
3478 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3479 if (alg_aai == OP_ALG_AAI_GCM)
3483 * Skip algorithms requiring message digests
3484 * if MD or MD size is not supported by device.
3487 (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
3490 caam_aead_alg_init(t_alg);
3492 err = crypto_register_aead(&t_alg->aead);
3494 pr_warn("%s alg registration failed\n",
3495 t_alg->aead.base.cra_driver_name);
/* Remember success so caam_algapi_exit() only unregisters these. */
3499 t_alg->registered = true;
3504 pr_info("caam algorithms registered in /proc/crypto\n");
/* Module entry/exit points and metadata. */
3509 module_init(caam_algapi_init);
3510 module_exit(caam_algapi_exit);
3512 MODULE_LICENSE("GPL");
3513 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
3514 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");