/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->| ShareDesc   |
 * | *(packet 1) |                     | (PDB)       |
 * ---------------      |------------->| (hashKey)   |
 *       .              |         |--->| (operation) |
 * ---------------      |         |    ---------------
 * | JobDesc #2  |------|         |
 * | *(packet 2) |                |
 * ---------------                |
 *       .                        |
 * ---------------                |
 * | JobDesc #3  |-----------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
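/*
 * Illustrative sketch (not part of the driver; names ending in _example
 * are hypothetical): a job descriptor of the shape drawn above is built
 * with the desc_constr.h helpers, much as init_aead_job() does later in
 * this file. Kept out of the build on purpose.
 */
#if 0	/* example only */
static void build_job_desc_example(u32 *desc, dma_addr_t sh_desc_dma,
				   int sh_desc_len, dma_addr_t src_dma,
				   u32 in_len, dma_addr_t dst_dma,
				   u32 out_len)
{
	/* Header + pointer to the (reused) shared descriptor */
	init_job_desc_shared(desc, sh_desc_dma, sh_desc_len,
			     HDR_SHARE_DEFER | HDR_REVERSE);
	/* Packet references: where the input is, where the output goes */
	append_seq_in_ptr(desc, src_dma, in_len, 0);
	append_seq_out_ptr(desc, dst_dma, out_len, 0);
}
#endif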
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)
/* max IV is max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
#define CAAM_MAX_IV_LENGTH		16

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)
/* length of descriptors text */
#define DESC_AEAD_BASE			(4 * CAAM_CMD_SZ)
#define DESC_AEAD_ENC_LEN		(DESC_AEAD_BASE + 11 * CAAM_CMD_SZ)
#define DESC_AEAD_DEC_LEN		(DESC_AEAD_BASE + 15 * CAAM_CMD_SZ)
#define DESC_AEAD_GIVENC_LEN		(DESC_AEAD_ENC_LEN + 10 * CAAM_CMD_SZ)

/* Note: Nonce is counted in enckeylen */
#define DESC_AEAD_CTR_RFC3686_LEN	(4 * CAAM_CMD_SZ)

#define DESC_AEAD_NULL_BASE		(3 * CAAM_CMD_SZ)
#define DESC_AEAD_NULL_ENC_LEN		(DESC_AEAD_NULL_BASE + 11 * CAAM_CMD_SZ)
#define DESC_AEAD_NULL_DEC_LEN		(DESC_AEAD_NULL_BASE + 13 * CAAM_CMD_SZ)

#define DESC_GCM_BASE			(3 * CAAM_CMD_SZ)
#define DESC_GCM_ENC_LEN		(DESC_GCM_BASE + 16 * CAAM_CMD_SZ)
#define DESC_GCM_DEC_LEN		(DESC_GCM_BASE + 12 * CAAM_CMD_SZ)

#define DESC_RFC4106_BASE		(3 * CAAM_CMD_SZ)
#define DESC_RFC4106_ENC_LEN		(DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
#define DESC_RFC4106_DEC_LEN		(DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)

#define DESC_RFC4543_BASE		(3 * CAAM_CMD_SZ)
#define DESC_RFC4543_ENC_LEN		(DESC_RFC4543_BASE + 11 * CAAM_CMD_SZ)
#define DESC_RFC4543_DEC_LEN		(DESC_RFC4543_BASE + 12 * CAAM_CMD_SZ)

#define DESC_ABLKCIPHER_BASE		(3 * CAAM_CMD_SZ)
#define DESC_ABLKCIPHER_ENC_LEN		(DESC_ABLKCIPHER_BASE + \
					 20 * CAAM_CMD_SZ)
#define DESC_ABLKCIPHER_DEC_LEN		(DESC_ABLKCIPHER_BASE + \
					 15 * CAAM_CMD_SZ)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
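/*
 * Sketch of the inline-key rule applied throughout this file (example
 * only; keys_fit_inline_example is hypothetical and kept out of the
 * build): keys are embedded in the shared descriptor as immediate data
 * only when descriptor text, job I/O commands and key bytes together
 * still fit the 64-word descriptor buffer.
 */
#if 0	/* example only */
static bool keys_fit_inline_example(unsigned int desc_text_len,
				    unsigned int job_io_len,
				    unsigned int key_bytes)
{
	return desc_text_len + job_io_len + key_bytes <= CAAM_DESC_BYTES_MAX;
}
#endif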
#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif

#ifdef DEBUG
#include <linux/highmem.h>
static void dbg_dump_sg(const char *level, const char *prefix_str,
			int prefix_type, int rowsize, int groupsize,
			struct scatterlist *sg, size_t tlen, bool ascii,
			bool may_sleep)
{
	struct scatterlist *it;
	void *it_page;
	size_t len;
	void *buf;

	for (it = sg; it != NULL && tlen > 0 ; it = sg_next(it)) {
		/*
		 * make sure the scatterlist's page
		 * has a valid virtual memory mapping
		 */
		it_page = kmap_atomic(sg_page(it));
		if (unlikely(!it_page)) {
			printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
			return;
		}

		buf = it_page + it->offset;
		len = min_t(size_t, tlen, it->length);
		print_hex_dump(level, prefix_str, prefix_type, rowsize,
			       groupsize, buf, len, ascii);
		tlen -= len;

		kunmap_atomic(it_page);
	}
}
#endif /* DEBUG */
static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	int alg_op;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};
/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT);
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}
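/*
 * The sequence emitted above behaves like:
 *
 *	if (descriptor state is already shared)
 *		OPERATION decrypt with OP_ALG_AAI_DK (Decrypt Key)
 *	else
 *		OPERATION plain decrypt
 *
 * realized as a conditional jump on the SHRD flag to the DK variant,
 * plus an unconditional jump over it on the non-shared path.
 */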
/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}

/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}
/*
 * per-session context
 */
struct caam_ctx {
	struct device *jrdev;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	u32 class1_alg_type;
	u32 class2_alg_type;
	u32 alg_op;
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t key_dma;
	unsigned int enckeylen;
	unsigned int split_key_len;
	unsigned int split_key_pad_len;
	unsigned int authsize;
};
static void append_key_aead(u32 *desc, struct caam_ctx *ctx,
			    int keys_fit_inline, bool is_rfc3686)
{
	u32 *nonce;
	unsigned int enckeylen = ctx->enckeylen;

	/*
	 * RFC3686 specific:
	 *	| ctx->key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (keys_fit_inline) {
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
		append_key_as_imm(desc, (void *)ctx->key +
				  ctx->split_key_pad_len, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	} else {
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
		append_key(desc, ctx->key_dma + ctx->split_key_pad_len,
			   enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len +
				enckeylen);
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
}
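/*
 * Illustrative ctx->key layout for the rfc3686 case above (sizes
 * hypothetical, e.g. split_key_pad_len = 64 and a 16-byte AES key):
 *
 *	ctx->key[0..63]		MDHA split key (class 2)
 *	ctx->key[64..79]	AES key (class 1)
 *	ctx->key[80..83]	4-byte RFC3686 nonce
 *
 * which is why enckeylen is reduced by CTR_RFC3686_NONCE_SIZE before
 * the class 1 key is loaded.
 */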
static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
				  int keys_fit_inline, bool is_rfc3686)
{
	u32 *key_jump_cmd;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	append_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	set_jump_tgt_here(desc, key_jump_cmd);
}
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *jump_cmd, *read_move_cmd, *write_move_cmd;
	u32 *desc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_AEAD_NULL_ENC_LEN + AEAD_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_NULL_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	/* aead_decrypt shared descriptor */
	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline;
	u32 geniv, moveiv;
	u32 ctx1_iv_off = 0;
	u32 *desc, *wait_cmd;
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->enckeylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_ENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_DEC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	if (alg->caam.geniv)
		append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
	else
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (alg->caam.geniv) {
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctr_mode)
		append_operation(desc, ctx->class1_alg_type |
				 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, ctx->class1_alg_type);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_GIVENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	if (is_rfc3686)
		goto copy_iv;

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV from class 1 context to OFIFO */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* ivsize + cryptlen = seqoutlen - authsize */
	append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* No need to reload iv */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * Wait for IV transfer (ofifo -> class2) to finish before starting
	 * ciphertext transfer (ofifo -> external memory).
	 */
	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
	set_jump_tgt_here(desc, wait_cmd);

	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead givenc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

skip_givenc:
	return 0;
}
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *zero_payload_jump_cmd,
	    *zero_assoc_jump_cmd1, *zero_assoc_jump_cmd2;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_GCM_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD | JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* jump over the zero-payload commands */
	append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	/* write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_GCM_DEC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if they are loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD |
				   JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_RFC4106_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4106 enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_RFC4106_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4106 dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd;
	u32 *read_move_cmd, *write_move_cmd;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_RFC4543_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4543 enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_RFC4543_DEC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "rfc4543 dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}
static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}
static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in,
			      u32 authkeylen)
{
	return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len,
			     ctx->split_key_pad_len, key_in, authkeylen,
			     ctx->alg_op);
}
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	/* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */
	static const u8 mdpadlen[] = { 16, 20, 32, 32, 64, 64 };
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	/* Pick class 2 key length from algorithm submask */
	ctx->split_key_len = mdpadlen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >>
				      OP_ALG_ALGSEL_SHIFT] * 2;
	ctx->split_key_pad_len = ALIGN(ctx->split_key_len, 16);

	if (ctx->split_key_pad_len + keys.enckeylen > CAAM_MAX_KEY_SIZE)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	printk(KERN_ERR "split_key_len %d split_key_pad_len %d\n",
	       ctx->split_key_len, ctx->split_key_pad_len);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_aead_key(ctx, keys.authkey, keys.authkeylen);
	if (ret)
		goto badkey;

	/* append the encryption key after the auth split key */
	memcpy(ctx->key + ctx->split_key_pad_len, keys.enckey, keys.enckeylen);

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len +
				      keys.enckeylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->split_key_pad_len + keys.enckeylen, 1);
#endif

	ctx->enckeylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len +
				 keys.enckeylen, DMA_TO_DEVICE);
	}

	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
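/*
 * Worked example for the split key sizing above (illustrative): for
 * authenc(hmac(sha256),cbc(aes)), the SHA-256 entry in mdpadlen[] is
 * 32, so split_key_len = 32 * 2 = 64 and split_key_pad_len =
 * ALIGN(64, 16) = 64; the AES key is then copied right after those
 * 64 bytes in ctx->key.
 */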
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	ret = gcm_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->enckeylen = keylen - 4;

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4106_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->enckeylen = keylen - 4;

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4543_set_sh_desc(aead);
	if (ret) {
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);
	}

	return ret;
}
static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct ablkcipher_tfm *crt = &ablkcipher->base.crt_ablkcipher;
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;
	u32 *key_jump_cmd;
	u32 *desc;
	u8 *nonce;
	u32 geniv;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u8 *)key + keylen;
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load iv */
	append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u8 *)key + keylen;
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load iv */
	append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctr_mode)
		append_operation(desc, ctx->class1_alg_type |
				 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, ctx->class1_alg_type);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u8 *)key + keylen;
		append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
				   LDST_CLASS_IND_CCB |
				   LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (crt->ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX |
		    (crt->ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, crt->ivsize,
			 LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
				     LDST_SRCDST_BYTE_CONTEXT |
				     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				      LDST_OFFSET_SHIFT));

	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
						 desc_bytes(desc),
						 DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return ret;
}
static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *key_jump_cmd, *desc;
	__be64 sector_size = cpu_to_be64(512);

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
	append_data(desc, (void *)&sector_size, 8);

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
	append_data(desc, (void *)&sector_size, 8);

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_dec_op1(desc, ctx->class1_alg_type);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dma_unmap_single(jrdev, ctx->sh_desc_enc_dma,
				 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif

	return 0;
}
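/*
 * XTS IV handling sketch for the descriptors above (illustrative):
 * with the 512-byte sector size preloaded at class 1 context offset
 * 0x28, an IV whose upper 8 bytes encode sector index N is consumed as
 *
 *	SEQ LOAD 8 bytes -> CONTEXT1 offset 0x20	(sector index N)
 *	SEQ FIFO LOAD 8 bytes, SKIP			(low half discarded)
 *
 * leaving {..., N, 512} in the class 1 context for the hardware's
 * XTS tweak computation.
 */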
/*
 * aead_edesc - s/w-extended aead descriptor
 * @assoc_nents: number of segments in associated data (SPI+Seq) scatterlist
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int assoc_nents;
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		dma_unmap_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents ? : 1, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents ? : 1, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}
static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
                    void *context)
{
    struct ablkcipher_request *req = context;
    struct ablkcipher_edesc *edesc;
    struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
    struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
    int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

#ifdef DEBUG
    dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

    edesc = (struct ablkcipher_edesc *)((char *)desc -
         offsetof(struct ablkcipher_edesc, hw_desc));

    if (err)
        caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
    print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
               DUMP_PREFIX_ADDRESS, 16, 4, req->info,
               edesc->src_nents > 1 ? 100 : ivsize, 1);
    dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
            DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
            edesc->dst_nents > 1 ? 100 : req->nbytes, 1, true);
#endif

    ablkcipher_unmap(jrdev, edesc, req);

    /*
     * The crypto API expects us to set the IV (req->info) to the last
     * ciphertext block when running in CBC mode.
     */
    if ((ctx->class1_alg_type & OP_ALG_AAI_MASK) == OP_ALG_AAI_CBC)
        scatterwalk_map_and_copy(req->info, req->dst, req->nbytes -
                     ivsize, ivsize, 0);

    kfree(edesc);

    ablkcipher_request_complete(req, err);
}
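/*
 * Editor's illustration: the CBC IV-chaining contract above, restated as a
 * stand-alone sketch (hypothetical helper).  scatterwalk_map_and_copy()
 * with out=0 copies *from* the scatterlist, so this reads the last
 * ciphertext block back into req->info for the caller's next request.
 */
static inline void cbc_save_next_iv_sketch(struct ablkcipher_request *req,
                       int ivsize)
{
    scatterwalk_map_and_copy(req->info, req->dst,
                 req->nbytes - ivsize, ivsize, 0);
}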
static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
                    void *context)
{
    struct ablkcipher_request *req = context;
    struct ablkcipher_edesc *edesc;
    struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
    struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
    int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

#ifdef DEBUG
    dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

    edesc = (struct ablkcipher_edesc *)((char *)desc -
         offsetof(struct ablkcipher_edesc, hw_desc));
    if (err)
        caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
    print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
               DUMP_PREFIX_ADDRESS, 16, 4, req->info,
               ivsize, 1);
    dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
            DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
            edesc->dst_nents > 1 ? 100 : req->nbytes, 1, true);
#endif

    ablkcipher_unmap(jrdev, edesc, req);

    /*
     * The crypto API expects us to set the IV (req->info) to the last
     * ciphertext block when running in CBC mode.  On decrypt that block
     * is in the *source* buffer, so it is taken from req->src.
     */
    if ((ctx->class1_alg_type & OP_ALG_AAI_MASK) == OP_ALG_AAI_CBC)
        scatterwalk_map_and_copy(req->info, req->src, req->nbytes -
                     ivsize, ivsize, 0);

    kfree(edesc);

    ablkcipher_request_complete(req, err);
}
/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
              struct aead_edesc *edesc,
              bool all_contig, bool encrypt)
{
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct caam_ctx *ctx = crypto_aead_ctx(aead);
    int authsize = ctx->authsize;
    u32 *desc = edesc->hw_desc;
    u32 out_options, in_options;
    dma_addr_t dst_dma, src_dma;
    int len, sec4_sg_index = 0;
    dma_addr_t ptr;
    u32 *sh_desc;

    sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
    ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

    len = desc_len(sh_desc);
    init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

    if (all_contig) {
        src_dma = sg_dma_address(req->src);
        in_options = 0;
    } else {
        src_dma = edesc->sec4_sg_dma;
        sec4_sg_index += edesc->src_nents;
        in_options = LDST_SGF;
    }

    append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
              in_options);

    dst_dma = src_dma;
    out_options = in_options;

    if (unlikely(req->src != req->dst)) {
        if (!edesc->dst_nents) {
            dst_dma = sg_dma_address(req->dst);
        } else {
            dst_dma = edesc->sec4_sg_dma +
                  sec4_sg_index *
                  sizeof(struct sec4_sg_entry);
            out_options = LDST_SGF;
        }
    }

    if (encrypt)
        append_seq_out_ptr(desc, dst_dma,
                   req->assoclen + req->cryptlen + authsize,
                   out_options);
    else
        append_seq_out_ptr(desc, dst_dma,
                   req->assoclen + req->cryptlen - authsize,
                   out_options);

    /* REG3 = assoclen */
    append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}
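/*
 * Editor's illustration (hypothetical helper): the output sequence length
 * set up above.  For encrypt the output grows by the ICV that the hardware
 * appends; for decrypt it shrinks, because the trailing ICV is consumed by
 * the hardware authentication check instead of being copied out.
 */
static inline u32 aead_out_len_sketch(struct aead_request *req,
                      int authsize, bool encrypt)
{
    return req->assoclen + req->cryptlen +
           (encrypt ? authsize : -authsize);
}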
static void init_gcm_job(struct aead_request *req,
             struct aead_edesc *edesc,
             bool all_contig, bool encrypt)
{
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct caam_ctx *ctx = crypto_aead_ctx(aead);
    unsigned int ivsize = crypto_aead_ivsize(aead);
    u32 *desc = edesc->hw_desc;
    bool generic_gcm = (ivsize == 12);
    unsigned int last;

    init_aead_job(req, edesc, all_contig, encrypt);

    /* BUG This should not be specific to generic GCM. */
    last = 0;
    if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
        last = FIFOLD_TYPE_LAST1;

    /* Read GCM IV */
    append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
           FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
    /* Append Salt */
    if (!generic_gcm)
        append_data(desc, ctx->key + ctx->enckeylen, 4);
    /* Append IV */
    append_data(desc, req->iv, ivsize);
    /* End of blank commands */
}
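/*
 * Editor's illustration: for the RFC4106 (non-generic) flavour, the 12-byte
 * GCM IV fed to the FIFO above is {4-byte salt stored past the end of the
 * AES key, 8-byte per-request IV}; generic gcm(aes) already supplies all
 * 12 bytes in req->iv.  Hypothetical buffer-building sketch:
 */
static inline void gcm_build_iv_sketch(u8 *iv12, const u8 *salt,
                       const u8 *req_iv)
{
    memcpy(iv12, salt, 4);          /* from ctx->key + ctx->enckeylen */
    memcpy(iv12 + 4, req_iv, 8);    /* from req->iv */
}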
static void init_authenc_job(struct aead_request *req,
                 struct aead_edesc *edesc,
                 bool all_contig, bool encrypt)
{
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
                         struct caam_aead_alg, aead);
    unsigned int ivsize = crypto_aead_ivsize(aead);
    struct caam_ctx *ctx = crypto_aead_ctx(aead);
    const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
                   OP_ALG_AAI_CTR_MOD128);
    const bool is_rfc3686 = alg->caam.rfc3686;
    u32 *desc = edesc->hw_desc;
    u32 ivoffset = 0;

    /*
     * AES-CTR needs to load IV in CONTEXT1 reg
     * at an offset of 128bits (16bytes)
     * CONTEXT1[255:128] = IV
     */
    if (ctr_mode)
        ivoffset = 16;

    /*
     * RFC3686 specific:
     *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
     */
    if (is_rfc3686)
        ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

    init_aead_job(req, edesc, all_contig, encrypt);

    if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
        append_load_as_imm(desc, req->iv, ivsize,
                   LDST_CLASS_1_CCB |
                   LDST_SRCDST_BYTE_CONTEXT |
                   (ivoffset << LDST_OFFSET_SHIFT));
}
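/*
 * Editor's illustration (hypothetical helper): the CONTEXT1 byte offset
 * derived above in isolation.  Plain AES-CTR loads the IV at byte 16
 * (bits 255:128 of the register); RFC3686 pushes it further so the nonce
 * can sit in front of it, with the block counter following.
 */
static inline u32 ctr_iv_offset_sketch(bool ctr_mode, bool is_rfc3686)
{
    if (is_rfc3686)
        return 16 + CTR_RFC3686_NONCE_SIZE; /* {NONCE, IV, COUNTER} */
    return ctr_mode ? 16 : 0;
}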
/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
                struct ablkcipher_edesc *edesc,
                struct ablkcipher_request *req,
                bool iv_contig)
{
    struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
    int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
    u32 *desc = edesc->hw_desc;
    u32 out_options = 0, in_options;
    dma_addr_t dst_dma, src_dma;
    int len, sec4_sg_index = 0;

#ifdef DEBUG
    bool may_sleep = ((req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
                          CRYPTO_TFM_REQ_MAY_SLEEP)) != 0);
    print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
               DUMP_PREFIX_ADDRESS, 16, 4, req->info,
               ivsize, 1);
    printk(KERN_ERR "asked=%d, nbytes=%d\n",
           (int)edesc->src_nents ? 100 : req->nbytes, req->nbytes);
    dbg_dump_sg(KERN_ERR, "src @"__stringify(__LINE__)": ",
            DUMP_PREFIX_ADDRESS, 16, 4, req->src,
            edesc->src_nents ? 100 : req->nbytes, 1, may_sleep);
#endif

    len = desc_len(sh_desc);
    init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

    if (iv_contig) {
        src_dma = edesc->iv_dma;
        in_options = 0;
    } else {
        src_dma = edesc->sec4_sg_dma;
        sec4_sg_index += edesc->src_nents + 1;
        in_options = LDST_SGF;
    }
    append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

    if (likely(req->src == req->dst)) {
        if (!edesc->src_nents && iv_contig) {
            dst_dma = sg_dma_address(req->src);
        } else {
            dst_dma = edesc->sec4_sg_dma +
                  sizeof(struct sec4_sg_entry);
            out_options = LDST_SGF;
        }
    } else {
        if (!edesc->dst_nents) {
            dst_dma = sg_dma_address(req->dst);
        } else {
            dst_dma = edesc->sec4_sg_dma +
                  sec4_sg_index * sizeof(struct sec4_sg_entry);
            out_options = LDST_SGF;
        }
    }
    append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}
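/*
 * Editor's note: when the IV is not contiguous with the source, the link
 * table laid out by ablkcipher_edesc_alloc() is
 *   [0] IV, [1 .. src_nents] source segments, then destination segments
 *   (only when src != dst),
 * which is why sec4_sg_index above advances by src_nents + 1 before the
 * destination pointer is computed.
 */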
/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
                    struct ablkcipher_edesc *edesc,
                    struct ablkcipher_request *req,
                    bool iv_contig)
{
    struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
    int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
    u32 *desc = edesc->hw_desc;
    u32 out_options, in_options;
    dma_addr_t dst_dma, src_dma;
    int len, sec4_sg_index = 0;

#ifdef DEBUG
    bool may_sleep = ((req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
                          CRYPTO_TFM_REQ_MAY_SLEEP)) != 0);
    print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
               DUMP_PREFIX_ADDRESS, 16, 4, req->info,
               ivsize, 1);
    dbg_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
            DUMP_PREFIX_ADDRESS, 16, 4, req->src,
            edesc->src_nents ? 100 : req->nbytes, 1, may_sleep);
#endif

    len = desc_len(sh_desc);
    init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

    if (!edesc->src_nents) {
        src_dma = sg_dma_address(req->src);
        in_options = 0;
    } else {
        src_dma = edesc->sec4_sg_dma;
        sec4_sg_index += edesc->src_nents;
        in_options = LDST_SGF;
    }
    append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

    if (iv_contig) {
        dst_dma = edesc->iv_dma;
        out_options = 0;
    } else {
        dst_dma = edesc->sec4_sg_dma +
              sec4_sg_index * sizeof(struct sec4_sg_entry);
        out_options = LDST_SGF;
    }
    append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}
/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
                       int desc_bytes, bool *all_contig_ptr,
                       bool encrypt)
{
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct caam_ctx *ctx = crypto_aead_ctx(aead);
    struct device *jrdev = ctx->jrdev;
    gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
               CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
    int src_nents, dst_nents = 0;
    struct aead_edesc *edesc;
    int sgc;
    bool all_contig = true;
    int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
    unsigned int authsize = ctx->authsize;

    if (unlikely(req->dst != req->src)) {
        src_nents = sg_count(req->src, req->assoclen + req->cryptlen);
        dst_nents = sg_count(req->dst,
                     req->assoclen + req->cryptlen +
                     (encrypt ? authsize : (-authsize)));
    } else {
        src_nents = sg_count(req->src,
                     req->assoclen + req->cryptlen +
                     (encrypt ? authsize : 0));
    }

    /* Check if data are contiguous. */
    all_contig = !src_nents;
    if (!all_contig) {
        src_nents = src_nents ? : 1;
        sec4_sg_len = src_nents;
    }

    sec4_sg_len += dst_nents;

    sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

    /* allocate space for base edesc and hw desc commands, link tables */
    edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
            GFP_DMA | flags);
    if (!edesc) {
        dev_err(jrdev, "could not allocate extended descriptor\n");
        return ERR_PTR(-ENOMEM);
    }

    if (likely(req->src == req->dst)) {
        sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
                 DMA_BIDIRECTIONAL);
        if (unlikely(!sgc)) {
            dev_err(jrdev, "unable to map source\n");
            kfree(edesc);
            return ERR_PTR(-ENOMEM);
        }
    } else {
        sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
                 DMA_TO_DEVICE);
        if (unlikely(!sgc)) {
            dev_err(jrdev, "unable to map source\n");
            kfree(edesc);
            return ERR_PTR(-ENOMEM);
        }

        sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
                 DMA_FROM_DEVICE);
        if (unlikely(!sgc)) {
            dev_err(jrdev, "unable to map destination\n");
            dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
                     DMA_TO_DEVICE);
            kfree(edesc);
            return ERR_PTR(-ENOMEM);
        }
    }

    edesc->src_nents = src_nents;
    edesc->dst_nents = dst_nents;
    edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
             desc_bytes;
    *all_contig_ptr = all_contig;

    sec4_sg_index = 0;
    if (!all_contig) {
        sg_to_sec4_sg_last(req->src, src_nents,
                   edesc->sec4_sg + sec4_sg_index, 0);
        sec4_sg_index += src_nents;
    }
    if (dst_nents) {
        sg_to_sec4_sg_last(req->dst, dst_nents,
                   edesc->sec4_sg + sec4_sg_index, 0);
    }

    if (!sec4_sg_bytes)
        return edesc;

    edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
                        sec4_sg_bytes, DMA_TO_DEVICE);
    if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
        dev_err(jrdev, "unable to map S/G table\n");
        aead_unmap(jrdev, edesc, req);
        kfree(edesc);
        return ERR_PTR(-ENOMEM);
    }

    edesc->sec4_sg_bytes = sec4_sg_bytes;

    return edesc;
}
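/*
 * Editor's illustration (hypothetical helper): "all contiguous" above
 * simply means sg_count() found a single segment covering the whole
 * request, so the hardware can be pointed straight at the buffer instead
 * of at a sec4 link table.
 */
static inline bool aead_all_contig_sketch(int src_nents)
{
    return src_nents == 0; /* sg_count() returns 0 for one segment */
}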
static int gcm_encrypt(struct aead_request *req)
{
    struct aead_edesc *edesc;
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct caam_ctx *ctx = crypto_aead_ctx(aead);
    struct device *jrdev = ctx->jrdev;
    bool all_contig;
    u32 *desc;
    int ret = 0;

    /* allocate extended descriptor */
    edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
    if (IS_ERR(edesc))
        return PTR_ERR(edesc);

    /* Create and submit job descriptor */
    init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
    print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
               DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
               desc_bytes(edesc->hw_desc), 1);
#endif

    desc = edesc->hw_desc;
    ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
    if (!ret) {
        ret = -EINPROGRESS;
    } else {
        aead_unmap(jrdev, edesc, req);
        kfree(edesc);
    }

    return ret;
}
static int ipsec_gcm_encrypt(struct aead_request *req)
{
    if (req->assoclen < 8)
        return -EINVAL;

    return gcm_encrypt(req);
}
static int aead_encrypt(struct aead_request *req)
{
    struct aead_edesc *edesc;
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct caam_ctx *ctx = crypto_aead_ctx(aead);
    struct device *jrdev = ctx->jrdev;
    bool all_contig;
    u32 *desc;
    int ret = 0;

    /* allocate extended descriptor */
    edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
                 &all_contig, true);
    if (IS_ERR(edesc))
        return PTR_ERR(edesc);

    /* Create and submit job descriptor */
    init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
    print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
               DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
               desc_bytes(edesc->hw_desc), 1);
#endif

    desc = edesc->hw_desc;
    ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
    if (!ret) {
        ret = -EINPROGRESS;
    } else {
        aead_unmap(jrdev, edesc, req);
        kfree(edesc);
    }

    return ret;
}
static int gcm_decrypt(struct aead_request *req)
{
    struct aead_edesc *edesc;
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct caam_ctx *ctx = crypto_aead_ctx(aead);
    struct device *jrdev = ctx->jrdev;
    bool all_contig;
    u32 *desc;
    int ret = 0;

    /* allocate extended descriptor */
    edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
    if (IS_ERR(edesc))
        return PTR_ERR(edesc);

    /* Create and submit job descriptor */
    init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
    print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
               DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
               desc_bytes(edesc->hw_desc), 1);
#endif

    desc = edesc->hw_desc;
    ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
    if (!ret) {
        ret = -EINPROGRESS;
    } else {
        aead_unmap(jrdev, edesc, req);
        kfree(edesc);
    }

    return ret;
}
static int ipsec_gcm_decrypt(struct aead_request *req)
{
    if (req->assoclen < 8)
        return -EINVAL;

    return gcm_decrypt(req);
}
static int aead_decrypt(struct aead_request *req)
{
    struct aead_edesc *edesc;
    struct crypto_aead *aead = crypto_aead_reqtfm(req);
    struct caam_ctx *ctx = crypto_aead_ctx(aead);
    struct device *jrdev = ctx->jrdev;
    bool all_contig;
    u32 *desc;
    int ret = 0;

#ifdef DEBUG
    bool may_sleep = ((req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
                          CRYPTO_TFM_REQ_MAY_SLEEP)) != 0);
    dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
            DUMP_PREFIX_ADDRESS, 16, 4, req->src,
            req->assoclen + req->cryptlen, 1, may_sleep);
#endif

    /* allocate extended descriptor */
    edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
                 &all_contig, false);
    if (IS_ERR(edesc))
        return PTR_ERR(edesc);

    /* Create and submit job descriptor */
    init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
    print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
               DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
               desc_bytes(edesc->hw_desc), 1);
#endif

    desc = edesc->hw_desc;
    ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
    if (!ret) {
        ret = -EINPROGRESS;
    } else {
        aead_unmap(jrdev, edesc, req);
        kfree(edesc);
    }

    return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor for ablkcipher
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
                               *req, int desc_bytes,
                               bool *iv_contig_out)
{
    struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
    struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
    struct device *jrdev = ctx->jrdev;
    gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
               GFP_KERNEL : GFP_ATOMIC;
    int src_nents, dst_nents = 0, sec4_sg_bytes;
    struct ablkcipher_edesc *edesc;
    dma_addr_t iv_dma = 0;
    bool iv_contig = false;
    int sgc;
    int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
    int sec4_sg_index;

    src_nents = sg_count(req->src, req->nbytes);

    if (req->dst != req->src)
        dst_nents = sg_count(req->dst, req->nbytes);

    if (likely(req->src == req->dst)) {
        sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
                 DMA_BIDIRECTIONAL);
    } else {
        sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
                 DMA_TO_DEVICE);
        sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
                 DMA_FROM_DEVICE);
    }

    iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
    if (dma_mapping_error(jrdev, iv_dma)) {
        dev_err(jrdev, "unable to map IV\n");
        return ERR_PTR(-ENOMEM);
    }

    /*
     * Check if iv can be contiguous with source and destination.
     * If so, include it. If not, create scatterlist.
     */
    if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src))
        iv_contig = true;
    else
        src_nents = src_nents ? : 1;
    sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
            sizeof(struct sec4_sg_entry);

    /* allocate space for base edesc and hw desc commands, link tables */
    edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
            GFP_DMA | flags);
    if (!edesc) {
        dev_err(jrdev, "could not allocate extended descriptor\n");
        return ERR_PTR(-ENOMEM);
    }

    edesc->src_nents = src_nents;
    edesc->dst_nents = dst_nents;
    edesc->sec4_sg_bytes = sec4_sg_bytes;
    edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
             desc_bytes;

    sec4_sg_index = 0;
    if (!iv_contig) {
        dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
        sg_to_sec4_sg_last(req->src, src_nents,
                   edesc->sec4_sg + 1, 0);
        sec4_sg_index += 1 + src_nents;
    }

    if (dst_nents) {
        sg_to_sec4_sg_last(req->dst, dst_nents,
                   edesc->sec4_sg + sec4_sg_index, 0);
    }

    edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
                        sec4_sg_bytes, DMA_TO_DEVICE);
    if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
        dev_err(jrdev, "unable to map S/G table\n");
        return ERR_PTR(-ENOMEM);
    }

    edesc->iv_dma = iv_dma;

#ifdef DEBUG
    print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
               DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
               sec4_sg_bytes, 1);
#endif

    *iv_contig_out = iv_contig;
    return edesc;
}
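/*
 * Editor's illustration (hypothetical helper): the "IV contiguous with
 * source" test above in isolation.  It holds only when the source is a
 * single mapped segment that starts exactly where the mapped IV ends, so
 * hardware can read IV + data as one run without a link table.
 */
static inline bool iv_contig_sketch(dma_addr_t iv_dma, int ivsize,
                    struct scatterlist *src, int src_nents)
{
    return !src_nents && iv_dma + ivsize == sg_dma_address(src);
}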
static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
    struct ablkcipher_edesc *edesc;
    struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
    struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
    struct device *jrdev = ctx->jrdev;
    bool iv_contig;
    u32 *desc;
    int ret = 0;

    /* allocate extended descriptor */
    edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
                       CAAM_CMD_SZ, &iv_contig);
    if (IS_ERR(edesc))
        return PTR_ERR(edesc);

    /* Create and submit job descriptor */
    init_ablkcipher_job(ctx->sh_desc_enc,
                ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
    print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
               DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
               desc_bytes(edesc->hw_desc), 1);
#endif
    desc = edesc->hw_desc;
    ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

    if (!ret) {
        ret = -EINPROGRESS;
    } else {
        ablkcipher_unmap(jrdev, edesc, req);
        kfree(edesc);
    }

    return ret;
}
static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
    struct ablkcipher_edesc *edesc;
    struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
    struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
    struct device *jrdev = ctx->jrdev;
    bool iv_contig;
    u32 *desc;
    int ret = 0;

    /* allocate extended descriptor */
    edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
                       CAAM_CMD_SZ, &iv_contig);
    if (IS_ERR(edesc))
        return PTR_ERR(edesc);

    /* Create and submit job descriptor */
    init_ablkcipher_job(ctx->sh_desc_dec,
                ctx->sh_desc_dec_dma, edesc, req, iv_contig);
    desc = edesc->hw_desc;
#ifdef DEBUG
    print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
               DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
               desc_bytes(edesc->hw_desc), 1);
#endif

    ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
    if (!ret) {
        ret = -EINPROGRESS;
    } else {
        ablkcipher_unmap(jrdev, edesc, req);
        kfree(edesc);
    }

    return ret;
}
/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
                struct skcipher_givcrypt_request *greq,
                int desc_bytes,
                bool *iv_contig_out)
{
    struct ablkcipher_request *req = &greq->creq;
    struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
    struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
    struct device *jrdev = ctx->jrdev;
    gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
                      CRYPTO_TFM_REQ_MAY_SLEEP)) ?
               GFP_KERNEL : GFP_ATOMIC;
    int src_nents, dst_nents = 0, sec4_sg_bytes;
    struct ablkcipher_edesc *edesc;
    dma_addr_t iv_dma = 0;
    bool iv_contig = false;
    int sgc;
    int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
    int sec4_sg_index;

    src_nents = sg_count(req->src, req->nbytes);

    if (unlikely(req->dst != req->src))
        dst_nents = sg_count(req->dst, req->nbytes);

    if (likely(req->src == req->dst)) {
        sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
                 DMA_BIDIRECTIONAL);
    } else {
        sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
                 DMA_TO_DEVICE);
        sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
                 DMA_FROM_DEVICE);
    }

    /*
     * Check if iv can be contiguous with source and destination.
     * If so, include it. If not, create scatterlist.
     */
    iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
    if (dma_mapping_error(jrdev, iv_dma)) {
        dev_err(jrdev, "unable to map IV\n");
        return ERR_PTR(-ENOMEM);
    }

    if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst))
        iv_contig = true;
    else
        dst_nents = dst_nents ? : 1;
    sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
            sizeof(struct sec4_sg_entry);

    /* allocate space for base edesc and hw desc commands, link tables */
    edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
            GFP_DMA | flags);
    if (!edesc) {
        dev_err(jrdev, "could not allocate extended descriptor\n");
        return ERR_PTR(-ENOMEM);
    }

    edesc->src_nents = src_nents;
    edesc->dst_nents = dst_nents;
    edesc->sec4_sg_bytes = sec4_sg_bytes;
    edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
             desc_bytes;

    sec4_sg_index = 0;
    if (src_nents) {
        sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0);
        sec4_sg_index += src_nents;
    }

    if (!iv_contig) {
        dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
                   iv_dma, ivsize, 0);
        sec4_sg_index += 1;
        sg_to_sec4_sg_last(req->dst, dst_nents,
                   edesc->sec4_sg + sec4_sg_index, 0);
    }

    edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
                        sec4_sg_bytes, DMA_TO_DEVICE);
    if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
        dev_err(jrdev, "unable to map S/G table\n");
        return ERR_PTR(-ENOMEM);
    }
    edesc->iv_dma = iv_dma;

#ifdef DEBUG
    print_hex_dump(KERN_ERR,
               "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
               DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
               sec4_sg_bytes, 1);
#endif

    *iv_contig_out = iv_contig;
    return edesc;
}
static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
{
    struct ablkcipher_request *req = &creq->creq;
    struct ablkcipher_edesc *edesc;
    struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
    struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
    struct device *jrdev = ctx->jrdev;
    bool iv_contig;
    u32 *desc;
    int ret = 0;

    /* allocate extended descriptor */
    edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
                       CAAM_CMD_SZ, &iv_contig);
    if (IS_ERR(edesc))
        return PTR_ERR(edesc);

    /* Create and submit job descriptor */
    init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
                edesc, req, iv_contig);
#ifdef DEBUG
    print_hex_dump(KERN_ERR,
               "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
               DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
               desc_bytes(edesc->hw_desc), 1);
#endif
    desc = edesc->hw_desc;
    ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

    if (!ret) {
        ret = -EINPROGRESS;
    } else {
        ablkcipher_unmap(jrdev, edesc, req);
        kfree(edesc);
    }

    return ret;
}
#define template_aead		template_u.aead
#define template_ablkcipher	template_u.ablkcipher
struct caam_alg_template {
    char name[CRYPTO_MAX_ALG_NAME];
    char driver_name[CRYPTO_MAX_ALG_NAME];
    unsigned int blocksize;
    u32 type;
    union {
        struct ablkcipher_alg ablkcipher;
    } template_u;
    u32 class1_alg_type;
    u32 class2_alg_type;
    u32 alg_op;
};
2953 static struct caam_alg_template driver_algs[] = {
2954 /* ablkcipher descriptor */
2957 .driver_name = "cbc-aes-caam",
2958 .blocksize = AES_BLOCK_SIZE,
2959 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2960 .template_ablkcipher = {
2961 .setkey = ablkcipher_setkey,
2962 .encrypt = ablkcipher_encrypt,
2963 .decrypt = ablkcipher_decrypt,
2964 .givencrypt = ablkcipher_givencrypt,
2965 .geniv = "<built-in>",
2966 .min_keysize = AES_MIN_KEY_SIZE,
2967 .max_keysize = AES_MAX_KEY_SIZE,
2968 .ivsize = AES_BLOCK_SIZE,
2970 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2973 .name = "cbc(des3_ede)",
2974 .driver_name = "cbc-3des-caam",
2975 .blocksize = DES3_EDE_BLOCK_SIZE,
2976 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2977 .template_ablkcipher = {
2978 .setkey = ablkcipher_setkey,
2979 .encrypt = ablkcipher_encrypt,
2980 .decrypt = ablkcipher_decrypt,
2981 .givencrypt = ablkcipher_givencrypt,
2982 .geniv = "<built-in>",
2983 .min_keysize = DES3_EDE_KEY_SIZE,
2984 .max_keysize = DES3_EDE_KEY_SIZE,
2985 .ivsize = DES3_EDE_BLOCK_SIZE,
2987 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2991 .driver_name = "cbc-des-caam",
2992 .blocksize = DES_BLOCK_SIZE,
2993 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2994 .template_ablkcipher = {
2995 .setkey = ablkcipher_setkey,
2996 .encrypt = ablkcipher_encrypt,
2997 .decrypt = ablkcipher_decrypt,
2998 .givencrypt = ablkcipher_givencrypt,
2999 .geniv = "<built-in>",
3000 .min_keysize = DES_KEY_SIZE,
3001 .max_keysize = DES_KEY_SIZE,
3002 .ivsize = DES_BLOCK_SIZE,
3004 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3008 .driver_name = "ctr-aes-caam",
3010 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
3011 .template_ablkcipher = {
3012 .setkey = ablkcipher_setkey,
3013 .encrypt = ablkcipher_encrypt,
3014 .decrypt = ablkcipher_decrypt,
3016 .min_keysize = AES_MIN_KEY_SIZE,
3017 .max_keysize = AES_MAX_KEY_SIZE,
3018 .ivsize = AES_BLOCK_SIZE,
3020 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
3023 .name = "rfc3686(ctr(aes))",
3024 .driver_name = "rfc3686-ctr-aes-caam",
3026 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
3027 .template_ablkcipher = {
3028 .setkey = ablkcipher_setkey,
3029 .encrypt = ablkcipher_encrypt,
3030 .decrypt = ablkcipher_decrypt,
3031 .givencrypt = ablkcipher_givencrypt,
3032 .geniv = "<built-in>",
3033 .min_keysize = AES_MIN_KEY_SIZE +
3034 CTR_RFC3686_NONCE_SIZE,
3035 .max_keysize = AES_MAX_KEY_SIZE +
3036 CTR_RFC3686_NONCE_SIZE,
3037 .ivsize = CTR_RFC3686_IV_SIZE,
3039 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
3043 .driver_name = "xts-aes-caam",
3044 .blocksize = AES_BLOCK_SIZE,
3045 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
3046 .template_ablkcipher = {
3047 .setkey = xts_ablkcipher_setkey,
3048 .encrypt = ablkcipher_encrypt,
3049 .decrypt = ablkcipher_decrypt,
3051 .min_keysize = 2 * AES_MIN_KEY_SIZE,
3052 .max_keysize = 2 * AES_MAX_KEY_SIZE,
3053 .ivsize = AES_BLOCK_SIZE,
3055 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
3059 static struct caam_aead_alg driver_aeads[] = {
3063 .cra_name = "rfc4106(gcm(aes))",
3064 .cra_driver_name = "rfc4106-gcm-aes-caam",
3067 .setkey = rfc4106_setkey,
3068 .setauthsize = rfc4106_setauthsize,
3069 .encrypt = ipsec_gcm_encrypt,
3070 .decrypt = ipsec_gcm_decrypt,
3072 .maxauthsize = AES_BLOCK_SIZE,
3075 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3081 .cra_name = "rfc4543(gcm(aes))",
3082 .cra_driver_name = "rfc4543-gcm-aes-caam",
3085 .setkey = rfc4543_setkey,
3086 .setauthsize = rfc4543_setauthsize,
3087 .encrypt = ipsec_gcm_encrypt,
3088 .decrypt = ipsec_gcm_decrypt,
3090 .maxauthsize = AES_BLOCK_SIZE,
3093 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3096 /* Galois Counter Mode */
3100 .cra_name = "gcm(aes)",
3101 .cra_driver_name = "gcm-aes-caam",
3104 .setkey = gcm_setkey,
3105 .setauthsize = gcm_setauthsize,
3106 .encrypt = gcm_encrypt,
3107 .decrypt = gcm_decrypt,
3109 .maxauthsize = AES_BLOCK_SIZE,
3112 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3115 /* single-pass ipsec_esp descriptor */
3119 .cra_name = "authenc(hmac(md5),"
3120 "ecb(cipher_null))",
3121 .cra_driver_name = "authenc-hmac-md5-"
3122 "ecb-cipher_null-caam",
3123 .cra_blocksize = NULL_BLOCK_SIZE,
3125 .setkey = aead_setkey,
3126 .setauthsize = aead_setauthsize,
3127 .encrypt = aead_encrypt,
3128 .decrypt = aead_decrypt,
3129 .ivsize = NULL_IV_SIZE,
3130 .maxauthsize = MD5_DIGEST_SIZE,
3133 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3134 OP_ALG_AAI_HMAC_PRECOMP,
3135 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3141 .cra_name = "authenc(hmac(sha1),"
3142 "ecb(cipher_null))",
3143 .cra_driver_name = "authenc-hmac-sha1-"
3144 "ecb-cipher_null-caam",
3145 .cra_blocksize = NULL_BLOCK_SIZE,
3147 .setkey = aead_setkey,
3148 .setauthsize = aead_setauthsize,
3149 .encrypt = aead_encrypt,
3150 .decrypt = aead_decrypt,
3151 .ivsize = NULL_IV_SIZE,
3152 .maxauthsize = SHA1_DIGEST_SIZE,
3155 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3156 OP_ALG_AAI_HMAC_PRECOMP,
3157 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3163 .cra_name = "authenc(hmac(sha224),"
3164 "ecb(cipher_null))",
3165 .cra_driver_name = "authenc-hmac-sha224-"
3166 "ecb-cipher_null-caam",
3167 .cra_blocksize = NULL_BLOCK_SIZE,
3169 .setkey = aead_setkey,
3170 .setauthsize = aead_setauthsize,
3171 .encrypt = aead_encrypt,
3172 .decrypt = aead_decrypt,
3173 .ivsize = NULL_IV_SIZE,
3174 .maxauthsize = SHA224_DIGEST_SIZE,
3177 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3178 OP_ALG_AAI_HMAC_PRECOMP,
3179 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3185 .cra_name = "authenc(hmac(sha256),"
3186 "ecb(cipher_null))",
3187 .cra_driver_name = "authenc-hmac-sha256-"
3188 "ecb-cipher_null-caam",
3189 .cra_blocksize = NULL_BLOCK_SIZE,
3191 .setkey = aead_setkey,
3192 .setauthsize = aead_setauthsize,
3193 .encrypt = aead_encrypt,
3194 .decrypt = aead_decrypt,
3195 .ivsize = NULL_IV_SIZE,
3196 .maxauthsize = SHA256_DIGEST_SIZE,
3199 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3200 OP_ALG_AAI_HMAC_PRECOMP,
3201 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3207 .cra_name = "authenc(hmac(sha384),"
3208 "ecb(cipher_null))",
3209 .cra_driver_name = "authenc-hmac-sha384-"
3210 "ecb-cipher_null-caam",
3211 .cra_blocksize = NULL_BLOCK_SIZE,
3213 .setkey = aead_setkey,
3214 .setauthsize = aead_setauthsize,
3215 .encrypt = aead_encrypt,
3216 .decrypt = aead_decrypt,
3217 .ivsize = NULL_IV_SIZE,
3218 .maxauthsize = SHA384_DIGEST_SIZE,
3221 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3222 OP_ALG_AAI_HMAC_PRECOMP,
3223 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3229 .cra_name = "authenc(hmac(sha512),"
3230 "ecb(cipher_null))",
3231 .cra_driver_name = "authenc-hmac-sha512-"
3232 "ecb-cipher_null-caam",
3233 .cra_blocksize = NULL_BLOCK_SIZE,
3235 .setkey = aead_setkey,
3236 .setauthsize = aead_setauthsize,
3237 .encrypt = aead_encrypt,
3238 .decrypt = aead_decrypt,
3239 .ivsize = NULL_IV_SIZE,
3240 .maxauthsize = SHA512_DIGEST_SIZE,
3243 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3244 OP_ALG_AAI_HMAC_PRECOMP,
3245 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3251 .cra_name = "authenc(hmac(md5),cbc(aes))",
3252 .cra_driver_name = "authenc-hmac-md5-"
3254 .cra_blocksize = AES_BLOCK_SIZE,
3256 .setkey = aead_setkey,
3257 .setauthsize = aead_setauthsize,
3258 .encrypt = aead_encrypt,
3259 .decrypt = aead_decrypt,
3260 .ivsize = AES_BLOCK_SIZE,
3261 .maxauthsize = MD5_DIGEST_SIZE,
3264 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3265 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3266 OP_ALG_AAI_HMAC_PRECOMP,
3267 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3273 .cra_name = "echainiv(authenc(hmac(md5),"
3275 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3277 .cra_blocksize = AES_BLOCK_SIZE,
3279 .setkey = aead_setkey,
3280 .setauthsize = aead_setauthsize,
3281 .encrypt = aead_encrypt,
3282 .decrypt = aead_decrypt,
3283 .ivsize = AES_BLOCK_SIZE,
3284 .maxauthsize = MD5_DIGEST_SIZE,
3287 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3288 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3289 OP_ALG_AAI_HMAC_PRECOMP,
3290 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3297 .cra_name = "authenc(hmac(sha1),cbc(aes))",
3298 .cra_driver_name = "authenc-hmac-sha1-"
3300 .cra_blocksize = AES_BLOCK_SIZE,
3302 .setkey = aead_setkey,
3303 .setauthsize = aead_setauthsize,
3304 .encrypt = aead_encrypt,
3305 .decrypt = aead_decrypt,
3306 .ivsize = AES_BLOCK_SIZE,
3307 .maxauthsize = SHA1_DIGEST_SIZE,
3310 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3311 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3312 OP_ALG_AAI_HMAC_PRECOMP,
3313 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3319 .cra_name = "echainiv(authenc(hmac(sha1),"
3321 .cra_driver_name = "echainiv-authenc-"
3322 "hmac-sha1-cbc-aes-caam",
3323 .cra_blocksize = AES_BLOCK_SIZE,
3325 .setkey = aead_setkey,
3326 .setauthsize = aead_setauthsize,
3327 .encrypt = aead_encrypt,
3328 .decrypt = aead_decrypt,
3329 .ivsize = AES_BLOCK_SIZE,
3330 .maxauthsize = SHA1_DIGEST_SIZE,
3333 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3334 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3335 OP_ALG_AAI_HMAC_PRECOMP,
3336 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3343 .cra_name = "authenc(hmac(sha224),cbc(aes))",
3344 .cra_driver_name = "authenc-hmac-sha224-"
3346 .cra_blocksize = AES_BLOCK_SIZE,
3348 .setkey = aead_setkey,
3349 .setauthsize = aead_setauthsize,
3350 .encrypt = aead_encrypt,
3351 .decrypt = aead_decrypt,
3352 .ivsize = AES_BLOCK_SIZE,
3353 .maxauthsize = SHA224_DIGEST_SIZE,
3356 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3357 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3358 OP_ALG_AAI_HMAC_PRECOMP,
3359 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3365 .cra_name = "echainiv(authenc(hmac(sha224),"
3367 .cra_driver_name = "echainiv-authenc-"
3368 "hmac-sha224-cbc-aes-caam",
3369 .cra_blocksize = AES_BLOCK_SIZE,
3371 .setkey = aead_setkey,
3372 .setauthsize = aead_setauthsize,
3373 .encrypt = aead_encrypt,
3374 .decrypt = aead_decrypt,
3375 .ivsize = AES_BLOCK_SIZE,
3376 .maxauthsize = SHA224_DIGEST_SIZE,
3379 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3380 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3381 OP_ALG_AAI_HMAC_PRECOMP,
3382 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3389 .cra_name = "authenc(hmac(sha256),cbc(aes))",
3390 .cra_driver_name = "authenc-hmac-sha256-"
3392 .cra_blocksize = AES_BLOCK_SIZE,
3394 .setkey = aead_setkey,
3395 .setauthsize = aead_setauthsize,
3396 .encrypt = aead_encrypt,
3397 .decrypt = aead_decrypt,
3398 .ivsize = AES_BLOCK_SIZE,
3399 .maxauthsize = SHA256_DIGEST_SIZE,
3402 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3403 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3404 OP_ALG_AAI_HMAC_PRECOMP,
3405 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3411 .cra_name = "echainiv(authenc(hmac(sha256),"
3413 .cra_driver_name = "echainiv-authenc-"
3414 "hmac-sha256-cbc-aes-caam",
3415 .cra_blocksize = AES_BLOCK_SIZE,
3417 .setkey = aead_setkey,
3418 .setauthsize = aead_setauthsize,
3419 .encrypt = aead_encrypt,
3420 .decrypt = aead_decrypt,
3421 .ivsize = AES_BLOCK_SIZE,
3422 .maxauthsize = SHA256_DIGEST_SIZE,
3425 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3426 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3427 OP_ALG_AAI_HMAC_PRECOMP,
3428 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3435 .cra_name = "authenc(hmac(sha384),cbc(aes))",
3436 .cra_driver_name = "authenc-hmac-sha384-"
3438 .cra_blocksize = AES_BLOCK_SIZE,
3440 .setkey = aead_setkey,
3441 .setauthsize = aead_setauthsize,
3442 .encrypt = aead_encrypt,
3443 .decrypt = aead_decrypt,
3444 .ivsize = AES_BLOCK_SIZE,
3445 .maxauthsize = SHA384_DIGEST_SIZE,
3448 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3449 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3450 OP_ALG_AAI_HMAC_PRECOMP,
3451 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3457 .cra_name = "echainiv(authenc(hmac(sha384),"
3459 .cra_driver_name = "echainiv-authenc-"
3460 "hmac-sha384-cbc-aes-caam",
3461 .cra_blocksize = AES_BLOCK_SIZE,
3463 .setkey = aead_setkey,
3464 .setauthsize = aead_setauthsize,
3465 .encrypt = aead_encrypt,
3466 .decrypt = aead_decrypt,
3467 .ivsize = AES_BLOCK_SIZE,
3468 .maxauthsize = SHA384_DIGEST_SIZE,
3471 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3472 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3473 OP_ALG_AAI_HMAC_PRECOMP,
3474 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3481 .cra_name = "authenc(hmac(sha512),cbc(aes))",
3482 .cra_driver_name = "authenc-hmac-sha512-"
3484 .cra_blocksize = AES_BLOCK_SIZE,
3486 .setkey = aead_setkey,
3487 .setauthsize = aead_setauthsize,
3488 .encrypt = aead_encrypt,
3489 .decrypt = aead_decrypt,
3490 .ivsize = AES_BLOCK_SIZE,
3491 .maxauthsize = SHA512_DIGEST_SIZE,
3494 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3495 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3496 OP_ALG_AAI_HMAC_PRECOMP,
3497 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3503 .cra_name = "echainiv(authenc(hmac(sha512),"
3505 .cra_driver_name = "echainiv-authenc-"
3506 "hmac-sha512-cbc-aes-caam",
3507 .cra_blocksize = AES_BLOCK_SIZE,
3509 .setkey = aead_setkey,
3510 .setauthsize = aead_setauthsize,
3511 .encrypt = aead_encrypt,
3512 .decrypt = aead_decrypt,
3513 .ivsize = AES_BLOCK_SIZE,
3514 .maxauthsize = SHA512_DIGEST_SIZE,
3517 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3518 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3519 OP_ALG_AAI_HMAC_PRECOMP,
3520 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3527 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
3528 .cra_driver_name = "authenc-hmac-md5-"
3529 "cbc-des3_ede-caam",
3530 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3532 .setkey = aead_setkey,
3533 .setauthsize = aead_setauthsize,
3534 .encrypt = aead_encrypt,
3535 .decrypt = aead_decrypt,
3536 .ivsize = DES3_EDE_BLOCK_SIZE,
3537 .maxauthsize = MD5_DIGEST_SIZE,
3540 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3541 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3542 OP_ALG_AAI_HMAC_PRECOMP,
3543 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3549 .cra_name = "echainiv(authenc(hmac(md5),"
3551 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3552 "cbc-des3_ede-caam",
3553 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3555 .setkey = aead_setkey,
3556 .setauthsize = aead_setauthsize,
3557 .encrypt = aead_encrypt,
3558 .decrypt = aead_decrypt,
3559 .ivsize = DES3_EDE_BLOCK_SIZE,
3560 .maxauthsize = MD5_DIGEST_SIZE,
3563 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3564 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3565 OP_ALG_AAI_HMAC_PRECOMP,
3566 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3573 .cra_name = "authenc(hmac(sha1),"
3575 .cra_driver_name = "authenc-hmac-sha1-"
3576 "cbc-des3_ede-caam",
3577 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3579 .setkey = aead_setkey,
3580 .setauthsize = aead_setauthsize,
3581 .encrypt = aead_encrypt,
3582 .decrypt = aead_decrypt,
3583 .ivsize = DES3_EDE_BLOCK_SIZE,
3584 .maxauthsize = SHA1_DIGEST_SIZE,
3587 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3588 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3589 OP_ALG_AAI_HMAC_PRECOMP,
3590 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3596 .cra_name = "echainiv(authenc(hmac(sha1),"
3598 .cra_driver_name = "echainiv-authenc-"
3600 "cbc-des3_ede-caam",
3601 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3603 .setkey = aead_setkey,
3604 .setauthsize = aead_setauthsize,
3605 .encrypt = aead_encrypt,
3606 .decrypt = aead_decrypt,
3607 .ivsize = DES3_EDE_BLOCK_SIZE,
3608 .maxauthsize = SHA1_DIGEST_SIZE,
3611 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3612 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3613 OP_ALG_AAI_HMAC_PRECOMP,
3614 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3621 .cra_name = "authenc(hmac(sha224),"
3623 .cra_driver_name = "authenc-hmac-sha224-"
3624 "cbc-des3_ede-caam",
3625 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3627 .setkey = aead_setkey,
3628 .setauthsize = aead_setauthsize,
3629 .encrypt = aead_encrypt,
3630 .decrypt = aead_decrypt,
3631 .ivsize = DES3_EDE_BLOCK_SIZE,
3632 .maxauthsize = SHA224_DIGEST_SIZE,
3635 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3636 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3637 OP_ALG_AAI_HMAC_PRECOMP,
3638 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3644 .cra_name = "echainiv(authenc(hmac(sha224),"
3646 .cra_driver_name = "echainiv-authenc-"
3648 "cbc-des3_ede-caam",
3649 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3651 .setkey = aead_setkey,
3652 .setauthsize = aead_setauthsize,
3653 .encrypt = aead_encrypt,
3654 .decrypt = aead_decrypt,
3655 .ivsize = DES3_EDE_BLOCK_SIZE,
3656 .maxauthsize = SHA224_DIGEST_SIZE,
3659 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3660 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3661 OP_ALG_AAI_HMAC_PRECOMP,
3662 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3669 .cra_name = "authenc(hmac(sha256),"
3671 .cra_driver_name = "authenc-hmac-sha256-"
3672 "cbc-des3_ede-caam",
3673 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3675 .setkey = aead_setkey,
3676 .setauthsize = aead_setauthsize,
3677 .encrypt = aead_encrypt,
3678 .decrypt = aead_decrypt,
3679 .ivsize = DES3_EDE_BLOCK_SIZE,
3680 .maxauthsize = SHA256_DIGEST_SIZE,
3683 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3684 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3685 OP_ALG_AAI_HMAC_PRECOMP,
3686 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3692 .cra_name = "echainiv(authenc(hmac(sha256),"
3694 .cra_driver_name = "echainiv-authenc-"
3696 "cbc-des3_ede-caam",
3697 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3699 .setkey = aead_setkey,
3700 .setauthsize = aead_setauthsize,
3701 .encrypt = aead_encrypt,
3702 .decrypt = aead_decrypt,
3703 .ivsize = DES3_EDE_BLOCK_SIZE,
3704 .maxauthsize = SHA256_DIGEST_SIZE,
3707 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3708 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3709 OP_ALG_AAI_HMAC_PRECOMP,
3710 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3717 .cra_name = "authenc(hmac(sha384),"
3719 .cra_driver_name = "authenc-hmac-sha384-"
3720 "cbc-des3_ede-caam",
3721 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3723 .setkey = aead_setkey,
3724 .setauthsize = aead_setauthsize,
3725 .encrypt = aead_encrypt,
3726 .decrypt = aead_decrypt,
3727 .ivsize = DES3_EDE_BLOCK_SIZE,
3728 .maxauthsize = SHA384_DIGEST_SIZE,
3731 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3732 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3733 OP_ALG_AAI_HMAC_PRECOMP,
3734 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3740 .cra_name = "echainiv(authenc(hmac(sha384),"
3742 .cra_driver_name = "echainiv-authenc-"
3744 "cbc-des3_ede-caam",
3745 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3747 .setkey = aead_setkey,
3748 .setauthsize = aead_setauthsize,
3749 .encrypt = aead_encrypt,
3750 .decrypt = aead_decrypt,
3751 .ivsize = DES3_EDE_BLOCK_SIZE,
3752 .maxauthsize = SHA384_DIGEST_SIZE,
3755 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3756 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3757 OP_ALG_AAI_HMAC_PRECOMP,
3758 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3765 .cra_name = "authenc(hmac(sha512),"
3767 .cra_driver_name = "authenc-hmac-sha512-"
3768 "cbc-des3_ede-caam",
3769 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3771 .setkey = aead_setkey,
3772 .setauthsize = aead_setauthsize,
3773 .encrypt = aead_encrypt,
3774 .decrypt = aead_decrypt,
3775 .ivsize = DES3_EDE_BLOCK_SIZE,
3776 .maxauthsize = SHA512_DIGEST_SIZE,
3779 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3780 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3781 OP_ALG_AAI_HMAC_PRECOMP,
3782 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3788 .cra_name = "echainiv(authenc(hmac(sha512),"
3790 .cra_driver_name = "echainiv-authenc-"
3792 "cbc-des3_ede-caam",
3793 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3795 .setkey = aead_setkey,
3796 .setauthsize = aead_setauthsize,
3797 .encrypt = aead_encrypt,
3798 .decrypt = aead_decrypt,
3799 .ivsize = DES3_EDE_BLOCK_SIZE,
3800 .maxauthsize = SHA512_DIGEST_SIZE,
3803 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3804 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3805 OP_ALG_AAI_HMAC_PRECOMP,
3806 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3813 .cra_name = "authenc(hmac(md5),cbc(des))",
3814 .cra_driver_name = "authenc-hmac-md5-"
3816 .cra_blocksize = DES_BLOCK_SIZE,
3818 .setkey = aead_setkey,
3819 .setauthsize = aead_setauthsize,
3820 .encrypt = aead_encrypt,
3821 .decrypt = aead_decrypt,
3822 .ivsize = DES_BLOCK_SIZE,
3823 .maxauthsize = MD5_DIGEST_SIZE,
3826 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3827 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3828 OP_ALG_AAI_HMAC_PRECOMP,
3829 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3835 .cra_name = "echainiv(authenc(hmac(md5),"
3837 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3839 .cra_blocksize = DES_BLOCK_SIZE,
3841 .setkey = aead_setkey,
3842 .setauthsize = aead_setauthsize,
3843 .encrypt = aead_encrypt,
3844 .decrypt = aead_decrypt,
3845 .ivsize = DES_BLOCK_SIZE,
3846 .maxauthsize = MD5_DIGEST_SIZE,
3849 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3850 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3851 OP_ALG_AAI_HMAC_PRECOMP,
3852 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3859 .cra_name = "authenc(hmac(sha1),cbc(des))",
3860 .cra_driver_name = "authenc-hmac-sha1-"
3862 .cra_blocksize = DES_BLOCK_SIZE,
3864 .setkey = aead_setkey,
3865 .setauthsize = aead_setauthsize,
3866 .encrypt = aead_encrypt,
3867 .decrypt = aead_decrypt,
3868 .ivsize = DES_BLOCK_SIZE,
3869 .maxauthsize = SHA1_DIGEST_SIZE,
3872 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3873 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3874 OP_ALG_AAI_HMAC_PRECOMP,
3875 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3881 .cra_name = "echainiv(authenc(hmac(sha1),"
3883 .cra_driver_name = "echainiv-authenc-"
3884 "hmac-sha1-cbc-des-caam",
3885 .cra_blocksize = DES_BLOCK_SIZE,
3887 .setkey = aead_setkey,
3888 .setauthsize = aead_setauthsize,
3889 .encrypt = aead_encrypt,
3890 .decrypt = aead_decrypt,
3891 .ivsize = DES_BLOCK_SIZE,
3892 .maxauthsize = SHA1_DIGEST_SIZE,
3895 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3896 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3897 OP_ALG_AAI_HMAC_PRECOMP,
3898 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3905 .cra_name = "authenc(hmac(sha224),cbc(des))",
3906 .cra_driver_name = "authenc-hmac-sha224-"
3908 .cra_blocksize = DES_BLOCK_SIZE,
3910 .setkey = aead_setkey,
3911 .setauthsize = aead_setauthsize,
3912 .encrypt = aead_encrypt,
3913 .decrypt = aead_decrypt,
3914 .ivsize = DES_BLOCK_SIZE,
3915 .maxauthsize = SHA224_DIGEST_SIZE,
3918 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3919 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3920 OP_ALG_AAI_HMAC_PRECOMP,
3921 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3927 .cra_name = "echainiv(authenc(hmac(sha224),"
3929 .cra_driver_name = "echainiv-authenc-"
3930 "hmac-sha224-cbc-des-caam",
3931 .cra_blocksize = DES_BLOCK_SIZE,
3933 .setkey = aead_setkey,
3934 .setauthsize = aead_setauthsize,
3935 .encrypt = aead_encrypt,
3936 .decrypt = aead_decrypt,
3937 .ivsize = DES_BLOCK_SIZE,
3938 .maxauthsize = SHA224_DIGEST_SIZE,
3941 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3942 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3943 OP_ALG_AAI_HMAC_PRECOMP,
3944 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3951 .cra_name = "authenc(hmac(sha256),cbc(des))",
3952 .cra_driver_name = "authenc-hmac-sha256-"
3954 .cra_blocksize = DES_BLOCK_SIZE,
3956 .setkey = aead_setkey,
3957 .setauthsize = aead_setauthsize,
3958 .encrypt = aead_encrypt,
3959 .decrypt = aead_decrypt,
3960 .ivsize = DES_BLOCK_SIZE,
3961 .maxauthsize = SHA256_DIGEST_SIZE,
3964 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3965 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3966 OP_ALG_AAI_HMAC_PRECOMP,
3967 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3973 .cra_name = "echainiv(authenc(hmac(sha256),"
3975 .cra_driver_name = "echainiv-authenc-"
3976 "hmac-sha256-cbc-des-caam",
3977 .cra_blocksize = DES_BLOCK_SIZE,
3979 .setkey = aead_setkey,
3980 .setauthsize = aead_setauthsize,
3981 .encrypt = aead_encrypt,
3982 .decrypt = aead_decrypt,
3983 .ivsize = DES_BLOCK_SIZE,
3984 .maxauthsize = SHA256_DIGEST_SIZE,
3987 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3988 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3989 OP_ALG_AAI_HMAC_PRECOMP,
3990 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3997 .cra_name = "authenc(hmac(sha384),cbc(des))",
3998 .cra_driver_name = "authenc-hmac-sha384-"
4000 .cra_blocksize = DES_BLOCK_SIZE,
4002 .setkey = aead_setkey,
4003 .setauthsize = aead_setauthsize,
4004 .encrypt = aead_encrypt,
4005 .decrypt = aead_decrypt,
4006 .ivsize = DES_BLOCK_SIZE,
4007 .maxauthsize = SHA384_DIGEST_SIZE,
4010 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4011 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4012 OP_ALG_AAI_HMAC_PRECOMP,
4013 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4019 .cra_name = "echainiv(authenc(hmac(sha384),"
4021 .cra_driver_name = "echainiv-authenc-"
4022 "hmac-sha384-cbc-des-caam",
4023 .cra_blocksize = DES_BLOCK_SIZE,
4025 .setkey = aead_setkey,
4026 .setauthsize = aead_setauthsize,
4027 .encrypt = aead_encrypt,
4028 .decrypt = aead_decrypt,
4029 .ivsize = DES_BLOCK_SIZE,
4030 .maxauthsize = SHA384_DIGEST_SIZE,
4033 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4034 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4035 OP_ALG_AAI_HMAC_PRECOMP,
4036 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4043 .cra_name = "authenc(hmac(sha512),cbc(des))",
4044 .cra_driver_name = "authenc-hmac-sha512-"
4046 .cra_blocksize = DES_BLOCK_SIZE,
4048 .setkey = aead_setkey,
4049 .setauthsize = aead_setauthsize,
4050 .encrypt = aead_encrypt,
4051 .decrypt = aead_decrypt,
4052 .ivsize = DES_BLOCK_SIZE,
4053 .maxauthsize = SHA512_DIGEST_SIZE,
4056 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4057 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4058 OP_ALG_AAI_HMAC_PRECOMP,
4059 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4065 .cra_name = "echainiv(authenc(hmac(sha512),"
4067 .cra_driver_name = "echainiv-authenc-"
4068 "hmac-sha512-cbc-des-caam",
4069 .cra_blocksize = DES_BLOCK_SIZE,
4071 .setkey = aead_setkey,
4072 .setauthsize = aead_setauthsize,
4073 .encrypt = aead_encrypt,
4074 .decrypt = aead_decrypt,
4075 .ivsize = DES_BLOCK_SIZE,
4076 .maxauthsize = SHA512_DIGEST_SIZE,
4079 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4080 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4081 OP_ALG_AAI_HMAC_PRECOMP,
4082 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4089 .cra_name = "authenc(hmac(md5),"
4090 "rfc3686(ctr(aes)))",
4091 .cra_driver_name = "authenc-hmac-md5-"
4092 "rfc3686-ctr-aes-caam",
4095 .setkey = aead_setkey,
4096 .setauthsize = aead_setauthsize,
4097 .encrypt = aead_encrypt,
4098 .decrypt = aead_decrypt,
4099 .ivsize = CTR_RFC3686_IV_SIZE,
4100 .maxauthsize = MD5_DIGEST_SIZE,
4103 .class1_alg_type = OP_ALG_ALGSEL_AES |
4104 OP_ALG_AAI_CTR_MOD128,
4105 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4106 OP_ALG_AAI_HMAC_PRECOMP,
4107 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4114 .cra_name = "seqiv(authenc("
4115 "hmac(md5),rfc3686(ctr(aes))))",
4116 .cra_driver_name = "seqiv-authenc-hmac-md5-"
4117 "rfc3686-ctr-aes-caam",
4120 .setkey = aead_setkey,
4121 .setauthsize = aead_setauthsize,
4122 .encrypt = aead_encrypt,
4123 .decrypt = aead_decrypt,
4124 .ivsize = CTR_RFC3686_IV_SIZE,
4125 .maxauthsize = MD5_DIGEST_SIZE,
4128 .class1_alg_type = OP_ALG_ALGSEL_AES |
4129 OP_ALG_AAI_CTR_MOD128,
4130 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4131 OP_ALG_AAI_HMAC_PRECOMP,
4132 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4140 .cra_name = "authenc(hmac(sha1),"
4141 "rfc3686(ctr(aes)))",
4142 .cra_driver_name = "authenc-hmac-sha1-"
4143 "rfc3686-ctr-aes-caam",
4146 .setkey = aead_setkey,
4147 .setauthsize = aead_setauthsize,
4148 .encrypt = aead_encrypt,
4149 .decrypt = aead_decrypt,
4150 .ivsize = CTR_RFC3686_IV_SIZE,
4151 .maxauthsize = SHA1_DIGEST_SIZE,
4154 .class1_alg_type = OP_ALG_ALGSEL_AES |
4155 OP_ALG_AAI_CTR_MOD128,
4156 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4157 OP_ALG_AAI_HMAC_PRECOMP,
4158 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4165 .cra_name = "seqiv(authenc("
4166 "hmac(sha1),rfc3686(ctr(aes))))",
4167 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
4168 "rfc3686-ctr-aes-caam",
4171 .setkey = aead_setkey,
4172 .setauthsize = aead_setauthsize,
4173 .encrypt = aead_encrypt,
4174 .decrypt = aead_decrypt,
4175 .ivsize = CTR_RFC3686_IV_SIZE,
4176 .maxauthsize = SHA1_DIGEST_SIZE,
4179 .class1_alg_type = OP_ALG_ALGSEL_AES |
4180 OP_ALG_AAI_CTR_MOD128,
4181 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4182 OP_ALG_AAI_HMAC_PRECOMP,
4183 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4191 .cra_name = "authenc(hmac(sha224),"
4192 "rfc3686(ctr(aes)))",
4193 .cra_driver_name = "authenc-hmac-sha224-"
4194 "rfc3686-ctr-aes-caam",
4197 .setkey = aead_setkey,
4198 .setauthsize = aead_setauthsize,
4199 .encrypt = aead_encrypt,
4200 .decrypt = aead_decrypt,
4201 .ivsize = CTR_RFC3686_IV_SIZE,
4202 .maxauthsize = SHA224_DIGEST_SIZE,
4205 .class1_alg_type = OP_ALG_ALGSEL_AES |
4206 OP_ALG_AAI_CTR_MOD128,
4207 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4208 OP_ALG_AAI_HMAC_PRECOMP,
4209 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4216 .cra_name = "seqiv(authenc("
4217 "hmac(sha224),rfc3686(ctr(aes))))",
4218 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
4219 "rfc3686-ctr-aes-caam",
4222 .setkey = aead_setkey,
4223 .setauthsize = aead_setauthsize,
4224 .encrypt = aead_encrypt,
4225 .decrypt = aead_decrypt,
4226 .ivsize = CTR_RFC3686_IV_SIZE,
4227 .maxauthsize = SHA224_DIGEST_SIZE,
4230 .class1_alg_type = OP_ALG_ALGSEL_AES |
4231 OP_ALG_AAI_CTR_MOD128,
4232 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4233 OP_ALG_AAI_HMAC_PRECOMP,
4234 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4242 .cra_name = "authenc(hmac(sha256),"
4243 "rfc3686(ctr(aes)))",
4244 .cra_driver_name = "authenc-hmac-sha256-"
4245 "rfc3686-ctr-aes-caam",
4248 .setkey = aead_setkey,
4249 .setauthsize = aead_setauthsize,
4250 .encrypt = aead_encrypt,
4251 .decrypt = aead_decrypt,
4252 .ivsize = CTR_RFC3686_IV_SIZE,
4253 .maxauthsize = SHA256_DIGEST_SIZE,
4256 .class1_alg_type = OP_ALG_ALGSEL_AES |
4257 OP_ALG_AAI_CTR_MOD128,
4258 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4259 OP_ALG_AAI_HMAC_PRECOMP,
4260 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4267 .cra_name = "seqiv(authenc(hmac(sha256),"
4268 "rfc3686(ctr(aes))))",
4269 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
4270 "rfc3686-ctr-aes-caam",
4273 .setkey = aead_setkey,
4274 .setauthsize = aead_setauthsize,
4275 .encrypt = aead_encrypt,
4276 .decrypt = aead_decrypt,
4277 .ivsize = CTR_RFC3686_IV_SIZE,
4278 .maxauthsize = SHA256_DIGEST_SIZE,
4281 .class1_alg_type = OP_ALG_ALGSEL_AES |
4282 OP_ALG_AAI_CTR_MOD128,
4283 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4284 OP_ALG_AAI_HMAC_PRECOMP,
4285 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4293 .cra_name = "authenc(hmac(sha384),"
4294 "rfc3686(ctr(aes)))",
4295 .cra_driver_name = "authenc-hmac-sha384-"
4296 "rfc3686-ctr-aes-caam",
4299 .setkey = aead_setkey,
4300 .setauthsize = aead_setauthsize,
4301 .encrypt = aead_encrypt,
4302 .decrypt = aead_decrypt,
4303 .ivsize = CTR_RFC3686_IV_SIZE,
4304 .maxauthsize = SHA384_DIGEST_SIZE,
4307 .class1_alg_type = OP_ALG_ALGSEL_AES |
4308 OP_ALG_AAI_CTR_MOD128,
4309 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4310 OP_ALG_AAI_HMAC_PRECOMP,
4311 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4318 .cra_name = "seqiv(authenc(hmac(sha384),"
4319 "rfc3686(ctr(aes))))",
4320 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
4321 "rfc3686-ctr-aes-caam",
4324 .setkey = aead_setkey,
4325 .setauthsize = aead_setauthsize,
4326 .encrypt = aead_encrypt,
4327 .decrypt = aead_decrypt,
4328 .ivsize = CTR_RFC3686_IV_SIZE,
4329 .maxauthsize = SHA384_DIGEST_SIZE,
4332 .class1_alg_type = OP_ALG_ALGSEL_AES |
4333 OP_ALG_AAI_CTR_MOD128,
4334 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4335 OP_ALG_AAI_HMAC_PRECOMP,
4336 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4344 .cra_name = "authenc(hmac(sha512),"
4345 "rfc3686(ctr(aes)))",
4346 .cra_driver_name = "authenc-hmac-sha512-"
4347 "rfc3686-ctr-aes-caam",
4350 .setkey = aead_setkey,
4351 .setauthsize = aead_setauthsize,
4352 .encrypt = aead_encrypt,
4353 .decrypt = aead_decrypt,
4354 .ivsize = CTR_RFC3686_IV_SIZE,
4355 .maxauthsize = SHA512_DIGEST_SIZE,
4358 .class1_alg_type = OP_ALG_ALGSEL_AES |
4359 OP_ALG_AAI_CTR_MOD128,
4360 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4361 OP_ALG_AAI_HMAC_PRECOMP,
4362 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4369 .cra_name = "seqiv(authenc(hmac(sha512),"
4370 "rfc3686(ctr(aes))))",
4371 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
4372 "rfc3686-ctr-aes-caam",
4375 .setkey = aead_setkey,
4376 .setauthsize = aead_setauthsize,
4377 .encrypt = aead_encrypt,
4378 .decrypt = aead_decrypt,
4379 .ivsize = CTR_RFC3686_IV_SIZE,
4380 .maxauthsize = SHA512_DIGEST_SIZE,
4383 .class1_alg_type = OP_ALG_ALGSEL_AES |
4384 OP_ALG_AAI_CTR_MOD128,
4385 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4386 OP_ALG_AAI_HMAC_PRECOMP,
4387 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
struct caam_crypto_alg {
    struct crypto_alg crypto_alg;
    struct list_head entry;
    struct caam_alg_entry caam;
};
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
{
    ctx->jrdev = caam_jr_alloc();
    if (IS_ERR(ctx->jrdev)) {
        pr_err("Job Ring Device allocation for transform failed\n");
        return PTR_ERR(ctx->jrdev);
    }

    /* copy descriptor header template value */
    ctx->class1_alg_type = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
    ctx->class2_alg_type = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
    ctx->alg_op = OP_TYPE_CLASS2_ALG | caam->alg_op;

    return 0;
}
static int caam_cra_init(struct crypto_tfm *tfm)
{
    struct crypto_alg *alg = tfm->__crt_alg;
    struct caam_crypto_alg *caam_alg =
         container_of(alg, struct caam_crypto_alg, crypto_alg);
    struct caam_ctx *ctx = crypto_tfm_ctx(tfm);

    return caam_init_common(ctx, &caam_alg->caam);
}

static int caam_aead_init(struct crypto_aead *tfm)
{
    struct aead_alg *alg = crypto_aead_alg(tfm);
    struct caam_aead_alg *caam_alg =
         container_of(alg, struct caam_aead_alg, aead);
    struct caam_ctx *ctx = crypto_aead_ctx(tfm);

    return caam_init_common(ctx, &caam_alg->caam);
}
static void caam_exit_common(struct caam_ctx *ctx)
{
    if (ctx->sh_desc_enc_dma &&
        !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
        dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
                 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
    if (ctx->sh_desc_dec_dma &&
        !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma))
        dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma,
                 desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE);
    if (ctx->sh_desc_givenc_dma &&
        !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma))
        dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma,
                 desc_bytes(ctx->sh_desc_givenc),
                 DMA_TO_DEVICE);
    if (ctx->key_dma &&
        !dma_mapping_error(ctx->jrdev, ctx->key_dma))
        dma_unmap_single(ctx->jrdev, ctx->key_dma,
                 ctx->enckeylen + ctx->split_key_pad_len,
                 DMA_TO_DEVICE);

    caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_tfm *tfm)
{
	caam_exit_common(crypto_tfm_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}

static void __exit caam_algapi_exit(void)
{
	struct caam_crypto_alg *t_alg, *n;
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	if (!alg_list.next)
		return;

	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
		crypto_unregister_alg(&t_alg->crypto_alg);
		list_del(&t_alg->entry);
		kfree(t_alg);
	}
}

static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
					      *template)
{
	struct caam_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg) {
		pr_err("failed to allocate t_alg\n");
		return ERR_PTR(-ENOMEM);
	}

	alg = &t_alg->crypto_alg;

	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 template->driver_name);
	alg->cra_module = THIS_MODULE;
	alg->cra_init = caam_cra_init;
	alg->cra_exit = caam_cra_exit;
	alg->cra_priority = CAAM_CRA_PRIORITY;
	alg->cra_blocksize = template->blocksize;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct caam_ctx);
	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
			 template->type;
	switch (template->type) {
	case CRYPTO_ALG_TYPE_GIVCIPHER:
		alg->cra_type = &crypto_givcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg->cra_type = &crypto_ablkcipher_type;
		alg->cra_ablkcipher = template->template_ablkcipher;
		break;
	}

	t_alg->caam.class1_alg_type = template->class1_alg_type;
	t_alg->caam.class2_alg_type = template->class2_alg_type;
	t_alg->caam.alg_op = template->alg_op;

	return t_alg;
}
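
/*
 * The driver_aeads table only fills in the fields that differ per
 * algorithm (names, block size, ops, CAAM operation words); the common
 * base fields are filled in here once, just before registration.
 */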
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}

static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;
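
	/*
	 * Worked example: on an LP256 part the MD block caps out at
	 * SHA-256, so md_limit drops to SHA256_DIGEST_SIZE and the
	 * maxauthsize check in the AEAD loop below skips the sha384
	 * and sha512 entries of driver_aeads.
	 */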

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
			    OP_ALG_AAI_XTS)
				continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n", alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}
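
	/*
	 * Registration failures are not fatal here: the failing algorithm
	 * is logged and skipped (and its t_alg freed), while the remaining
	 * entries are still offered to the crypto API. err holds the last
	 * failure and is what module init ultimately returns.
	 */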

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");