/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */

#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"

#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)
/* max IV is max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
#define CAAM_MAX_IV_LENGTH		16

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

/* length of descriptors text */
#define DESC_AEAD_BASE			(4 * CAAM_CMD_SZ)
#define DESC_AEAD_ENC_LEN		(DESC_AEAD_BASE + 11 * CAAM_CMD_SZ)
#define DESC_AEAD_DEC_LEN		(DESC_AEAD_BASE + 15 * CAAM_CMD_SZ)
#define DESC_AEAD_GIVENC_LEN		(DESC_AEAD_ENC_LEN + 10 * CAAM_CMD_SZ)

/* Note: Nonce is counted in enckeylen */
#define DESC_AEAD_CTR_RFC3686_LEN	(4 * CAAM_CMD_SZ)

#define DESC_AEAD_NULL_BASE		(3 * CAAM_CMD_SZ)
#define DESC_AEAD_NULL_ENC_LEN		(DESC_AEAD_NULL_BASE + 11 * CAAM_CMD_SZ)
#define DESC_AEAD_NULL_DEC_LEN		(DESC_AEAD_NULL_BASE + 13 * CAAM_CMD_SZ)

#define DESC_GCM_BASE			(3 * CAAM_CMD_SZ)
#define DESC_GCM_ENC_LEN		(DESC_GCM_BASE + 16 * CAAM_CMD_SZ)
#define DESC_GCM_DEC_LEN		(DESC_GCM_BASE + 12 * CAAM_CMD_SZ)

#define DESC_RFC4106_BASE		(3 * CAAM_CMD_SZ)
#define DESC_RFC4106_ENC_LEN		(DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
#define DESC_RFC4106_DEC_LEN		(DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)

#define DESC_RFC4543_BASE		(3 * CAAM_CMD_SZ)
#define DESC_RFC4543_ENC_LEN		(DESC_RFC4543_BASE + 11 * CAAM_CMD_SZ)
#define DESC_RFC4543_DEC_LEN		(DESC_RFC4543_BASE + 12 * CAAM_CMD_SZ)

#define DESC_ABLKCIPHER_BASE		(3 * CAAM_CMD_SZ)
#define DESC_ABLKCIPHER_ENC_LEN		(DESC_ABLKCIPHER_BASE + \
					 20 * CAAM_CMD_SZ)
#define DESC_ABLKCIPHER_DEC_LEN		(DESC_ABLKCIPHER_BASE + \
					 15 * CAAM_CMD_SZ)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
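
/*
 * Illustrative sketch (not part of the driver): the "keys fit inline"
 * test applied by every *_set_sh_desc() routine below. The shared
 * descriptor text, the job descriptor I/O commands and any inlined key
 * material must together fit in the 64-word descriptor buffer; when they
 * do not, keys are referenced by DMA pointer instead of being embedded.
 * The helper name and parameters are hypothetical.
 */
#if 0	/* example only */
static bool example_keys_fit_inline(unsigned int desc_text_bytes,
				    unsigned int job_io_bytes,
				    unsigned int key_bytes)
{
	return desc_text_bytes + job_io_bytes + key_bytes <=
	       CAAM_DESC_BYTES_MAX;
}
#endif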

#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif

static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	int alg_op;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

/* Set DK bit in class 1 operation if shared */
static inline void append_dec_op1(u32 *desc, u32 type)
{
	u32 *jump_cmd, *uncond_jump_cmd;

	/* DK bit is valid only for AES */
	if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
		append_operation(desc, type | OP_ALG_AS_INITFINAL |
				 OP_ALG_DECRYPT);
		return;
	}

	jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT);
	uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);
	append_operation(desc, type | OP_ALG_AS_INITFINAL |
			 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
	set_jump_tgt_here(desc, uncond_jump_cmd);
}

/*
 * For aead functions, read payload and write payload,
 * both of which are specified in req->src and req->dst
 */
static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
{
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
			     KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
}

/*
 * For ablkcipher encrypt and decrypt, read from req->src and
 * write to req->dst
 */
static inline void ablkcipher_append_src_dst(u32 *desc)
{
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
			     KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
}

/*
 * per-session context
 */
struct caam_ctx {
	struct device *jrdev;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	u32 class1_alg_type;
	u32 class2_alg_type;
	u32 alg_op;
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t key_dma;
	unsigned int enckeylen;
	unsigned int split_key_len;
	unsigned int split_key_pad_len;
	unsigned int authsize;
};
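
/*
 * Illustrative note (assumed from aead_setkey() and append_key_aead()
 * below): for authenc algorithms, ctx->key holds, in order,
 *
 *	[ MDHA split key, padded out to split_key_pad_len ]
 *	[ encryption key, enckeylen bytes                 ]
 *	[ 4-byte nonce, RFC3686 algorithms only           ]
 *
 * so the class 2 (auth) key is read from offset 0 and the class 1
 * (cipher) key from offset split_key_pad_len.
 */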

static void append_key_aead(u32 *desc, struct caam_ctx *ctx,
			    int keys_fit_inline, bool is_rfc3686)
{
	u32 *nonce;
	unsigned int enckeylen = ctx->enckeylen;

	/*
	 * RFC3686 specific:
	 *	| ctx->key = {AUTH_KEY, ENC_KEY, NONCE}
	 *	| enckeylen = encryption key size + nonce size
	 */
	if (is_rfc3686)
		enckeylen -= CTR_RFC3686_NONCE_SIZE;

	if (keys_fit_inline) {
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
		append_key_as_imm(desc, (void *)ctx->key +
				  ctx->split_key_pad_len, enckeylen,
				  enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	} else {
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
		append_key(desc, ctx->key_dma + ctx->split_key_pad_len,
			   enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len +
				enckeylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc,
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
}

static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
				  int keys_fit_inline, bool is_rfc3686)
{
	u32 *key_jump_cmd;

	/* Note: Context registers are saved. */
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	append_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	set_jump_tgt_here(desc, key_jump_cmd);
}

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *jump_cmd, *read_move_cmd, *write_move_cmd;
	u32 *desc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_AEAD_NULL_ENC_LEN + AEAD_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_NULL_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	/* aead_decrypt shared descriptor */
	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
				  ctx->split_key_len, CLASS_2 |
				  KEY_DEST_MDHA_SPLIT | KEY_ENC);
	else
		append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
			   KEY_DEST_MDHA_SPLIT | KEY_ENC);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Prepare to read and write cryptlen + assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
				    MOVE_DEST_MATH2 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
				     MOVE_DEST_DESCBUF |
				     MOVE_WAITCOMP |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Read and write cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/*
	 * Insert a NOP here, since we need at least 4 instructions between
	 * code patching the descriptor buffer and the location being patched.
	 */
	jump_cmd = append_jump(desc, JUMP_TEST_ALL);
	set_jump_tgt_here(desc, jump_cmd);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
		    MOVE_AUX_LS);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Load ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead null dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline;
	u32 geniv, moveiv;
	u32 ctx1_iv_off = 0;
	u32 *desc, *wait_cmd;
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->enckeylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_ENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     FIFOLDST_VLF);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_DEC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	/* Class 2 operation */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	if (alg->caam.geniv)
		append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
	else
		append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	if (alg->caam.geniv) {
		append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
				LDST_SRCDST_BYTE_CONTEXT |
				(ctx1_iv_off << LDST_OFFSET_SHIFT));
		append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
			    (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
	}

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctr_mode)
		append_operation(desc, ctx->class1_alg_type |
				 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, ctx->class1_alg_type);

	/* Read and write cryptlen bytes */
	append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
	aead_append_src_dst(desc, FIFOLD_TYPE_MSG);

	/* Load ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
			     FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_AEAD_GIVENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
	    ctx->split_key_pad_len + ctx->enckeylen +
	    (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
	    CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;

	/* Note: Context registers are saved. */
	init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);

	if (is_rfc3686)
		goto copy_iv;

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

copy_iv:
	/* Copy IV to class 1 context */
	append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
		    (ivsize << MOVE_LEN_SHIFT));

	/* Return to encryption */
	append_operation(desc, ctx->class2_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Read and write assoclen bytes */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* ivsize + cryptlen = seqoutlen - authsize */
	append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc before reading payload */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
			     KEY_VLF);

	/* Copy iv from outfifo to class 2 fifo */
	moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
		 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
			    LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Will write ivsize + cryptlen */
	append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* No need to reload iv */
	append_seq_fifo_load(desc, ivsize,
			     FIFOLD_CLASS_SKIP);

	/* Will read cryptlen */
	append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * Wait for IV transfer (ofifo -> class2) to finish before starting
	 * ciphertext transfer (ofifo -> external memory).
	 */
	wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
	set_jump_tgt_here(desc, wait_cmd);

	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
			     FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "aead givenc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd, *zero_payload_jump_cmd,
	    *zero_assoc_jump_cmd1, *zero_assoc_jump_cmd2;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_GCM_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if keys are already loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD | JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* if assoclen + cryptlen is ZERO, skip to ICV write */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
	zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqinlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);

	/* if cryptlen is ZERO jump to zero-payload commands */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* jump the zero-payload commands */
	append_jump(desc, JUMP_TEST_ALL | 2);

	/* zero-payload commands */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);

	/* There is no input data */
	set_jump_tgt_here(desc, zero_assoc_jump_cmd2);

	/* write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_GCM_DEC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* skip key loading if keys are already loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD |
				   JUMP_COND_SELF);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* if assoclen is ZERO, skip reading the assoc data */
	append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
	zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
					   JUMP_COND_MATH_Z);

	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	set_jump_tgt_here(desc, zero_assoc_jump_cmd1);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* jump to zero-payload command if cryptlen is zero */
	zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
					    JUMP_COND_MATH_Z);

	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* store encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* zero-payload command */
	set_jump_tgt_here(desc, zero_payload_jump_cmd);

	/* read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "gcm dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_RFC4106_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* cryptlen = seqoutlen - assoclen */
	append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);

	/* Write encrypted data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read payload data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_RFC4106_DEC_LEN + DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
	append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);

	/* Read assoc data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);

	/* Skip IV */
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Will read cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);

	/* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);

	/* Skip assoc data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);

	/* Will write cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* Read encrypted data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
			     FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);

	/* Read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4106 dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool keys_fit_inline = false;
	u32 *key_jump_cmd;
	u32 *read_move_cmd, *write_move_cmd;
	u32 *desc;

	if (!ctx->enckeylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (DESC_RFC4543_ENC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_enc;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* assoclen + cryptlen = seqinlen */
	append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);

	/* Read and write assoclen + cryptlen bytes */
	aead_append_src_dst(desc, FIFOLD_TYPE_AAD);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Write ICV */
	append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
			 LDST_SRCDST_BYTE_CONTEXT);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	keys_fit_inline = false;
	if (DESC_RFC4543_DEC_LEN + GCM_DESC_JOB_IO_LEN +
	    ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
		keys_fit_inline = true;

	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL);

	/* Skip key loading if it is loaded due to sharing */
	key_jump_cmd = append_jump(desc, JUMP_JSL |
				   JUMP_TEST_ALL | JUMP_COND_SHRD);
	if (keys_fit_inline)
		append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
				  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
	else
		append_key(desc, ctx->key_dma, ctx->enckeylen,
			   CLASS_1 | KEY_DEST_CLASS_REG);
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Class 1 operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);

	/* assoclen + cryptlen = seqoutlen */
	append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/*
	 * MOVE_LEN opcode is not available in all SEC HW revisions,
	 * thus need to do some magic, i.e. self-patch the descriptor
	 * buffer.
	 */
	read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
				    (0x6 << MOVE_LEN_SHIFT));
	write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
				     (0x8 << MOVE_LEN_SHIFT));

	/* Will read assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Will write assoclen + cryptlen bytes */
	append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);

	/* Store payload data */
	append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);

	/* In-snoop assoclen + cryptlen data */
	append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
			     FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);

	set_move_tgt_here(desc, read_move_cmd);
	set_move_tgt_here(desc, write_move_cmd);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	/* Move payload data to OFIFO */
	append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Read ICV */
	append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
			     FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "rfc4543 dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in,
			      u32 authkeylen)
{
	return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len,
			     ctx->split_key_pad_len, key_in, authkeylen,
			     ctx->alg_op);
}

static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	/* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */
	static const u8 mdpadlen[] = { 16, 20, 32, 32, 64, 64 };
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	/* Pick class 2 key length from algorithm submask */
	ctx->split_key_len = mdpadlen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >>
				      OP_ALG_ALGSEL_SHIFT] * 2;
	ctx->split_key_pad_len = ALIGN(ctx->split_key_len, 16);
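
	/*
	 * Worked example (illustrative): for SHA-256 the mdpadlen[] entry
	 * is 32, so split_key_len = 32 * 2 = 64 and split_key_pad_len
	 * stays 64; for SHA-1 it is 20, so 40 is padded up to 48.
	 */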

	if (ctx->split_key_pad_len + keys.enckeylen > CAAM_MAX_KEY_SIZE)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	printk(KERN_ERR "split_key_len %d split_key_pad_len %d\n",
	       ctx->split_key_len, ctx->split_key_pad_len);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_aead_key(ctx, keys.authkey, keys.authkeylen);
	if (ret)
		goto badkey;

	/* append encryption key after the auth split key */
	memcpy(ctx->key + ctx->split_key_pad_len, keys.enckey, keys.enckeylen);

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len +
				      keys.enckeylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->split_key_pad_len + keys.enckeylen, 1);
#endif

	ctx->enckeylen = keys.enckeylen;

	ret = aead_set_sh_desc(aead);
	if (ret)
		dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len +
				 keys.enckeylen, DMA_TO_DEVICE);

	return ret;
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	ret = gcm_set_sh_desc(aead);
	if (ret)
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);

	return ret;
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->enckeylen = keylen - 4;

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4106_set_sh_desc(aead);
	if (ret)
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);

	return ret;
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->enckeylen = keylen - 4;

	ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}

	ret = rfc4543_set_sh_desc(aead);
	if (ret)
		dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
				 DMA_TO_DEVICE);

	return ret;
}

static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct ablkcipher_tfm *crt = &ablkcipher->base.crt_ablkcipher;
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	int ret = 0;
	u32 *key_jump_cmd;
	u32 *desc;
	u32 *nonce;
	u32 geniv;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
				      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)(key + keylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* Load iv */
	append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher enc shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)(key + keylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}

	set_jump_tgt_here(desc, key_jump_cmd);

	/* load IV */
	append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
			LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	/* Choose operation */
	if (ctr_mode)
		append_operation(desc, ctx->class1_alg_type |
				 OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
	else
		append_dec_op1(desc, ctx->class1_alg_type);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
					      desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}

#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher dec shdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif
	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 |
			  KEY_DEST_CLASS_REG);

	/* Load Nonce into CONTEXT1 reg */
	if (is_rfc3686) {
		nonce = (u32 *)(key + keylen);
		append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
				    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
		append_move(desc, MOVE_WAITCOMP |
			    MOVE_SRC_OUTFIFO |
			    MOVE_DEST_CLASS1CTX |
			    (16 << MOVE_OFFSET_SHIFT) |
			    (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
	}
	set_jump_tgt_here(desc, key_jump_cmd);

	/* Generate IV */
	geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
		NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
		NFIFOENTRY_PTYPE_RND | (crt->ivsize << NFIFOENTRY_DLEN_SHIFT);
	append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
			    LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
	append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
	append_move(desc, MOVE_WAITCOMP |
		    MOVE_SRC_INFIFO |
		    MOVE_DEST_CLASS1CTX |
		    (crt->ivsize << MOVE_LEN_SHIFT) |
		    (ctx1_iv_off << MOVE_OFFSET_SHIFT));
	append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);

	/* Copy generated IV to memory */
	append_seq_store(desc, crt->ivsize,
			 LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
			 (ctx1_iv_off << LDST_OFFSET_SHIFT));

	/* Load Counter into CONTEXT1 reg */
	if (is_rfc3686)
		append_load_imm_u32(desc, (u32)1, LDST_IMM |
				    LDST_CLASS_1_CCB |
				    LDST_SRCDST_BYTE_CONTEXT |
				    ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
				     LDST_OFFSET_SHIFT));

	if (ctx1_iv_off)
		append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
			    (1 << JUMP_OFFSET_SHIFT));

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type |
			 OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
						 desc_bytes(desc),
						 DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc,
		       desc_bytes(desc), 1);
#endif

	return ret;
}

static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *key_jump_cmd, *desc;
	__be64 sector_size = cpu_to_be64(512);

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->key_dma)) {
		dev_err(jrdev, "unable to map key i/o memory\n");
		return -ENOMEM;
	}
	ctx->enckeylen = keylen;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 keys only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
	append_data(desc, (void *)&sector_size, 8);

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
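
	/*
	 * Illustrative IV layout assumed by the two commands above: for a
	 * 16-byte XTS IV, bytes 0-7 (the sector index) are SEQ-loaded into
	 * CONTEXT1 at offset 0x20, and bytes 8-15 are discarded.
	 */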

	/* Load operation */
	append_operation(desc, ctx->class1_alg_type | OP_ALG_AS_INITFINAL |
			 OP_ALG_ENCRYPT);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;

	init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
	/* Skip if already shared */
	key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
				   JUMP_COND_SHRD);

	/* Load class1 key only */
	append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
			  ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);

	/* Load sector size with index 40 bytes (0x28) */
	append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
	append_data(desc, (void *)&sector_size, 8);

	set_jump_tgt_here(desc, key_jump_cmd);

	/*
	 * create sequence for loading the sector index
	 * Upper 8B of IV - will be used as sector index
	 * Lower 8B of IV - will be discarded
	 */
	append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
		   LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
	append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);

	/* Load operation */
	append_dec_op1(desc, ctx->class1_alg_type);

	/* Perform operation */
	ablkcipher_append_src_dst(desc);

	ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
					      DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
		dma_unmap_single(jrdev, ctx->sh_desc_enc_dma,
				 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
		dev_err(jrdev, "unable to map shared descriptor\n");
		return -ENOMEM;
	}
#ifdef DEBUG
	print_hex_dump(KERN_ERR,
		       "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
#endif

	return 0;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @assoc_nents: number of segments in associated data (SPI+Seq) scatterlist
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int assoc_nents;
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input scatterlist
 * @dst_nents: number of segments in output scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @desc: h/w descriptor (variable length; must not exceed MAX_CAAM_DESCSIZE)
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[0];
};

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		dma_unmap_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents ? : 1, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents ? : 1, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify the h/w auth check passed; otherwise return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = (struct ablkcipher_edesc *)((char *)desc -
		 offsetof(struct ablkcipher_edesc, hw_desc));

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	print_hex_dump(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = (struct ablkcipher_edesc *)((char *)desc -
		 offsetof(struct ablkcipher_edesc, hw_desc));
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	print_hex_dump(KERN_ERR, "dst @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
		       edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}
2053 * Fill in aead job descriptor
2055 static void init_aead_job(struct aead_request *req,
2056 struct aead_edesc *edesc,
2057 bool all_contig, bool encrypt)
2059 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2060 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2061 int authsize = ctx->authsize;
2062 u32 *desc = edesc->hw_desc;
2063 u32 out_options, in_options;
2064 dma_addr_t dst_dma, src_dma;
2065 int len, sec4_sg_index = 0;
2066 dma_addr_t ptr;
2067 u32 *sh_desc;
2069 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
2070 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
2072 len = desc_len(sh_desc);
2073 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
2075 if (all_contig) {
2076 src_dma = sg_dma_address(req->src);
2077 in_options = 0;
2078 } else {
2079 src_dma = edesc->sec4_sg_dma;
2080 sec4_sg_index += edesc->src_nents;
2081 in_options = LDST_SGF;
2082 }
2084 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
2087 dst_dma = src_dma;
2088 out_options = in_options;
2090 if (unlikely(req->src != req->dst)) {
2091 if (!edesc->dst_nents) {
2092 dst_dma = sg_dma_address(req->dst);
2093 } else {
2095 dst_dma = edesc->sec4_sg_dma +
2096 sec4_sg_index *
2097 sizeof(struct sec4_sg_entry);
2098 out_options = LDST_SGF;
2099 }
2102 if (encrypt)
2103 append_seq_out_ptr(desc, dst_dma,
2104 req->assoclen + req->cryptlen + authsize,
2105 out_options);
2106 else
2107 append_seq_out_ptr(desc, dst_dma,
2108 req->assoclen + req->cryptlen - authsize,
2109 out_options);
2111 /* REG3 = assoclen */
2112 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
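/*
 * Worked example for the sequence pointers built above (illustrative):
 * with assoclen = 24, cryptlen = 1000 and authsize = 20, encryption reads
 * 24 + 1000 = 1024 bytes and writes 24 + 1000 + 20 = 1044 (payload plus
 * ICV). On decryption cryptlen already includes the ICV, so cryptlen =
 * 1020 reads 1044 bytes and writes back 24 + 1020 - 20 = 1024. REG3 holds
 * assoclen so the shared descriptor knows where the payload starts.
 */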
2115 static void init_gcm_job(struct aead_request *req,
2116 struct aead_edesc *edesc,
2117 bool all_contig, bool encrypt)
2119 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2120 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2121 unsigned int ivsize = crypto_aead_ivsize(aead);
2122 u32 *desc = edesc->hw_desc;
2123 bool generic_gcm = (ivsize == 12);
2124 unsigned int last;
2126 init_aead_job(req, edesc, all_contig, encrypt);
2128 /* FIXME: this special-casing should not be specific to generic GCM */
2129 last = 0;
2130 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
2131 last = FIFOLD_TYPE_LAST1;
2133 /* Read GCM IV */
2134 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
2135 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
2136 /* Append Salt */
2137 if (!generic_gcm)
2138 append_data(desc, ctx->key + ctx->enckeylen, 4);
2139 /* Append IV */
2140 append_data(desc, req->iv, ivsize);
2141 /* End of blank commands */
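/*
 * Illustrative note: the FIFO LOAD above always feeds the engine a 12-byte
 * GCM IV. For generic gcm(aes), ivsize == 12 and req->iv supplies all of
 * it; for the RFC4106/4543 variants, ivsize == 8 and the 4-byte salt kept
 * behind the AES key (ctx->key + ctx->enckeylen) is prepended:
 *
 *   generic: [ IV0 .. IV11 ]                from req->iv
 *   rfc4106: [ salt0..salt3 | IV0..IV7 ]    salt from setkey, IV per request
 *
 * FIFOLD_TYPE_LAST1 is OR'ed in only when there is neither AAD nor payload,
 * making the IV the final class-1 input.
 */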
2144 static void init_authenc_job(struct aead_request *req,
2145 struct aead_edesc *edesc,
2146 bool all_contig, bool encrypt)
2148 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2149 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
2150 struct caam_aead_alg, aead);
2151 unsigned int ivsize = crypto_aead_ivsize(aead);
2152 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2153 const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
2154 OP_ALG_AAI_CTR_MOD128);
2155 const bool is_rfc3686 = alg->caam.rfc3686;
2156 u32 *desc = edesc->hw_desc;
2157 u32 ivoffset = 0;
2160 * AES-CTR needs to load the IV into the CONTEXT1 register
2161 * at an offset of 128 bits (16 bytes):
2162 * CONTEXT1[255:128] = IV
2164 if (ctr_mode)
2165 ivoffset = 16;
2168 * RFC3686 specific:
2169 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
2171 if (is_rfc3686)
2172 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
2174 init_aead_job(req, edesc, all_contig, encrypt);
2176 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
2177 append_load_as_imm(desc, req->iv, ivsize,
2179 LDST_SRCDST_BYTE_CONTEXT |
2180 (ivoffset << LDST_OFFSET_SHIFT));
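/*
 * Worked example (illustrative) of the CONTEXT1 offsets used by the IV
 * load above:
 *
 *   plain CBC:  ivoffset = 0  (IV sits at the start of the context)
 *   AES-CTR:    ivoffset = 16 (IV parked at bit 128)
 *   RFC3686:    ivoffset = 16 + CTR_RFC3686_NONCE_SIZE = 20, leaving room
 *               for the nonce so the register ends up {NONCE, IV, COUNTER}
 */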
2184 * Fill in ablkcipher job descriptor
2186 static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
2187 struct ablkcipher_edesc *edesc,
2188 struct ablkcipher_request *req,
2189 bool iv_contig)
2191 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2192 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2193 u32 *desc = edesc->hw_desc;
2194 u32 out_options = 0, in_options;
2195 dma_addr_t dst_dma, src_dma;
2196 int len, sec4_sg_index = 0;
2199 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
2200 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
2201 ivsize, 1);
2202 print_hex_dump(KERN_ERR, "src @"__stringify(__LINE__)": ",
2203 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2204 edesc->src_nents ? 100 : req->nbytes, 1);
2207 len = desc_len(sh_desc);
2208 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
2210 if (iv_contig) {
2211 src_dma = edesc->iv_dma;
2212 in_options = 0;
2213 } else {
2214 src_dma = edesc->sec4_sg_dma;
2215 sec4_sg_index += edesc->src_nents + 1;
2216 in_options = LDST_SGF;
2217 }
2218 append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);
2220 if (likely(req->src == req->dst)) {
2221 if (!edesc->src_nents && iv_contig) {
2222 dst_dma = sg_dma_address(req->src);
2223 } else {
2224 dst_dma = edesc->sec4_sg_dma +
2225 sizeof(struct sec4_sg_entry);
2226 out_options = LDST_SGF;
2227 }
2228 } else {
2229 if (!edesc->dst_nents) {
2230 dst_dma = sg_dma_address(req->dst);
2231 } else {
2232 dst_dma = edesc->sec4_sg_dma +
2233 sec4_sg_index * sizeof(struct sec4_sg_entry);
2234 out_options = LDST_SGF;
2235 }
2236 }
2237 append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
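/*
 * Worked example (illustrative) of the S/G indexing above: with a
 * non-contiguous IV and src_nents = 3, the table at edesc->sec4_sg_dma is
 *
 *   [0] IV   [1] src seg 0   [2] src seg 1   [3] src seg 2 (LAST)
 *
 * so sec4_sg_index advances by src_nents + 1 = 4 and an out-of-place
 * destination starts at byte offset 4 * sizeof(struct sec4_sg_entry).
 * In-place output reuses the source entries, skipping only the leading IV
 * entry (an offset of exactly one entry).
 */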
2241 * Fill in ablkcipher givencrypt job descriptor
2243 static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
2244 struct ablkcipher_edesc *edesc,
2245 struct ablkcipher_request *req,
2246 bool iv_contig)
2248 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2249 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2250 u32 *desc = edesc->hw_desc;
2251 u32 out_options, in_options;
2252 dma_addr_t dst_dma, src_dma;
2253 int len, sec4_sg_index = 0;
2256 print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
2257 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
2258 ivsize, 1);
2259 print_hex_dump(KERN_ERR, "src @" __stringify(__LINE__) ": ",
2260 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2261 edesc->src_nents ? 100 : req->nbytes, 1);
2264 len = desc_len(sh_desc);
2265 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
2267 if (!edesc->src_nents) {
2268 src_dma = sg_dma_address(req->src);
2269 in_options = 0;
2270 } else {
2271 src_dma = edesc->sec4_sg_dma;
2272 sec4_sg_index += edesc->src_nents;
2273 in_options = LDST_SGF;
2274 }
2275 append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);
2277 if (iv_contig) {
2278 dst_dma = edesc->iv_dma;
2279 out_options = 0;
2280 } else {
2281 dst_dma = edesc->sec4_sg_dma +
2282 sec4_sg_index * sizeof(struct sec4_sg_entry);
2283 out_options = LDST_SGF;
2284 }
2285 append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
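/*
 * Illustrative note: for givencrypt the hardware *produces* the IV, so the
 * roles flip relative to init_ablkcipher_job() above: the input sequence
 * is just req->nbytes of plaintext, while the output sequence is
 * req->nbytes + ivsize and begins with the IV slot (edesc->iv_dma when
 * contiguous, otherwise the entry placed right after the source entries in
 * the sec4 table). The generated IV lands in greq->giv for the caller.
 */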
2289 * allocate and map the aead extended descriptor
2291 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
2292 int desc_bytes, bool *all_contig_ptr,
2293 bool encrypt)
2295 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2296 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2297 struct device *jrdev = ctx->jrdev;
2298 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2299 CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
2300 int src_nents, dst_nents = 0;
2301 struct aead_edesc *edesc;
2302 int sgc;
2303 bool all_contig = true;
2304 int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
2305 unsigned int authsize = ctx->authsize;
2307 if (unlikely(req->dst != req->src)) {
2308 src_nents = sg_count(req->src, req->assoclen + req->cryptlen);
2309 dst_nents = sg_count(req->dst,
2310 req->assoclen + req->cryptlen +
2311 (encrypt ? authsize : (-authsize)));
2312 } else {
2313 src_nents = sg_count(req->src,
2314 req->assoclen + req->cryptlen +
2315 (encrypt ? authsize : 0));
2316 }
2318 /* Check if data are contiguous. */
2319 all_contig = !src_nents;
2320 if (!all_contig) {
2321 src_nents = src_nents ? : 1;
2322 sec4_sg_len = src_nents;
2323 }
2325 sec4_sg_len += dst_nents;
2327 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
2329 /* allocate space for base edesc and hw desc commands, link tables */
2330 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
2331 GFP_DMA | flags);
2332 if (!edesc) {
2333 dev_err(jrdev, "could not allocate extended descriptor\n");
2334 return ERR_PTR(-ENOMEM);
2335 }
2337 if (likely(req->src == req->dst)) {
2338 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2339 DMA_BIDIRECTIONAL);
2340 if (unlikely(!sgc)) {
2341 dev_err(jrdev, "unable to map source\n");
2342 kfree(edesc);
2343 return ERR_PTR(-ENOMEM);
2344 }
2345 } else {
2346 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2347 DMA_TO_DEVICE);
2348 if (unlikely(!sgc)) {
2349 dev_err(jrdev, "unable to map source\n");
2350 kfree(edesc);
2351 return ERR_PTR(-ENOMEM);
2352 }
2354 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2355 DMA_FROM_DEVICE);
2356 if (unlikely(!sgc)) {
2357 dev_err(jrdev, "unable to map destination\n");
2358 dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
2359 DMA_TO_DEVICE);
2360 kfree(edesc);
2361 return ERR_PTR(-ENOMEM);
2362 }
2363 }
2365 edesc->src_nents = src_nents;
2366 edesc->dst_nents = dst_nents;
2367 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
2368 desc_bytes;
2369 *all_contig_ptr = all_contig;
2371 sec4_sg_index = 0;
2372 if (!all_contig) {
2373 sg_to_sec4_sg_last(req->src, src_nents,
2374 edesc->sec4_sg + sec4_sg_index, 0);
2375 sec4_sg_index += src_nents;
2376 }
2377 if (dst_nents) {
2378 sg_to_sec4_sg_last(req->dst, dst_nents,
2379 edesc->sec4_sg + sec4_sg_index, 0);
2380 }
2382 if (!sec4_sg_bytes)
2383 return edesc;
2385 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2386 sec4_sg_bytes, DMA_TO_DEVICE);
2387 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2388 dev_err(jrdev, "unable to map S/G table\n");
2389 aead_unmap(jrdev, edesc, req);
2390 kfree(edesc);
2391 return ERR_PTR(-ENOMEM);
2394 edesc->sec4_sg_bytes = sec4_sg_bytes;
2396 return edesc;
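/*
 * Illustrative layout of the single allocation made above: the base
 * struct (whose trailing hw_desc[] flexible array holds the job
 * descriptor) and the sec4 S/G table share one kzalloc(), so a single
 * kfree(edesc) releases all CPU-side memory:
 *
 *   edesc -> +---------------------------------+
 *            | struct aead_edesc               |
 *            | ... hw_desc[]: desc_bytes       |
 *            +---------------------------------+ <- edesc->sec4_sg
 *            | sec4_sg_bytes of S/G entries    |
 *            +---------------------------------+
 *
 * Only the S/G table is mapped here (edesc->sec4_sg_dma); the descriptor
 * itself is mapped by the job ring when caam_jr_enqueue() submits it.
 */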
2399 static int gcm_encrypt(struct aead_request *req)
2401 struct aead_edesc *edesc;
2402 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2403 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2404 struct device *jrdev = ctx->jrdev;
2405 bool all_contig;
2406 u32 *desc;
2407 int ret = 0;
2409 /* allocate extended descriptor */
2410 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
2411 if (IS_ERR(edesc))
2412 return PTR_ERR(edesc);
2414 /* Create and submit job descriptor */
2415 init_gcm_job(req, edesc, all_contig, true);
2417 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2418 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2419 desc_bytes(edesc->hw_desc), 1);
2422 desc = edesc->hw_desc;
2423 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
2424 if (!ret) {
2425 ret = -EINPROGRESS;
2426 } else {
2427 aead_unmap(jrdev, edesc, req);
2428 kfree(edesc);
2429 }
2431 return ret;
2434 static int ipsec_gcm_encrypt(struct aead_request *req)
2436 if (req->assoclen < 8)
2437 return -EINVAL;
2439 return gcm_encrypt(req);
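/*
 * Illustrative note (hedged): in this AEAD API the rfc4106(gcm(aes))
 * layout carries the 8-byte ESP IV at the tail of the associated data,
 * and the shared descriptor splits it back off internally; the
 * assoclen < 8 guard above (and its twin in ipsec_gcm_decrypt() below)
 * keeps that arithmetic from underflowing on malformed requests.
 */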
2442 static int aead_encrypt(struct aead_request *req)
2444 struct aead_edesc *edesc;
2445 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2446 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2447 struct device *jrdev = ctx->jrdev;
2448 bool all_contig;
2449 u32 *desc;
2450 int ret = 0;
2452 /* allocate extended descriptor */
2453 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
2455 if (IS_ERR(edesc))
2456 return PTR_ERR(edesc);
2458 /* Create and submit job descriptor */
2459 init_authenc_job(req, edesc, all_contig, true);
2461 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2462 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2463 desc_bytes(edesc->hw_desc), 1);
2466 desc = edesc->hw_desc;
2467 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
2468 if (!ret) {
2469 ret = -EINPROGRESS;
2470 } else {
2471 aead_unmap(jrdev, edesc, req);
2472 kfree(edesc);
2473 }
2475 return ret;
2478 static int gcm_decrypt(struct aead_request *req)
2480 struct aead_edesc *edesc;
2481 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2482 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2483 struct device *jrdev = ctx->jrdev;
2484 bool all_contig;
2485 u32 *desc;
2486 int ret = 0;
2488 /* allocate extended descriptor */
2489 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
2490 if (IS_ERR(edesc))
2491 return PTR_ERR(edesc);
2493 /* Create and submit job descriptor */
2494 init_gcm_job(req, edesc, all_contig, false);
2496 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2497 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2498 desc_bytes(edesc->hw_desc), 1);
2501 desc = edesc->hw_desc;
2502 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
2503 if (!ret) {
2504 ret = -EINPROGRESS;
2505 } else {
2506 aead_unmap(jrdev, edesc, req);
2507 kfree(edesc);
2508 }
2510 return ret;
2513 static int ipsec_gcm_decrypt(struct aead_request *req)
2515 if (req->assoclen < 8)
2516 return -EINVAL;
2518 return gcm_decrypt(req);
2521 static int aead_decrypt(struct aead_request *req)
2523 struct aead_edesc *edesc;
2524 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2525 struct caam_ctx *ctx = crypto_aead_ctx(aead);
2526 struct device *jrdev = ctx->jrdev;
2527 bool all_contig;
2528 u32 *desc;
2529 int ret = 0;
2531 /* allocate extended descriptor */
2532 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
2533 &all_contig, false);
2534 if (IS_ERR(edesc))
2535 return PTR_ERR(edesc);
2538 print_hex_dump(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
2539 DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2540 req->assoclen + req->cryptlen, 1);
2543 /* Create and submit job descriptor */
2544 init_authenc_job(req, edesc, all_contig, false);
2546 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2547 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2548 desc_bytes(edesc->hw_desc), 1);
2551 desc = edesc->hw_desc;
2552 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
2553 if (!ret) {
2554 ret = -EINPROGRESS;
2555 } else {
2556 aead_unmap(jrdev, edesc, req);
2557 kfree(edesc);
2558 }
2560 return ret;
2564 * allocate and map the ablkcipher extended descriptor
2566 static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
2567 *req, int desc_bytes,
2568 bool *iv_contig_out)
2570 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2571 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2572 struct device *jrdev = ctx->jrdev;
2573 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2574 CRYPTO_TFM_REQ_MAY_SLEEP)) ?
2575 GFP_KERNEL : GFP_ATOMIC;
2576 int src_nents, dst_nents = 0, sec4_sg_bytes;
2577 struct ablkcipher_edesc *edesc;
2578 dma_addr_t iv_dma = 0;
2579 bool iv_contig = false;
2580 int sgc;
2581 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2582 int sec4_sg_index;
2584 src_nents = sg_count(req->src, req->nbytes);
2586 if (req->dst != req->src)
2587 dst_nents = sg_count(req->dst, req->nbytes);
2589 if (likely(req->src == req->dst)) {
2590 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2591 DMA_BIDIRECTIONAL);
2592 } else {
2593 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2594 DMA_TO_DEVICE);
2595 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2596 DMA_FROM_DEVICE);
2597 }
2599 iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
2600 if (dma_mapping_error(jrdev, iv_dma)) {
2601 dev_err(jrdev, "unable to map IV\n");
2602 return ERR_PTR(-ENOMEM);
2606 * Check if the IV can be contiguous with source and destination.
2607 * If so, include it. If not, create a scatterlist.
2609 if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src))
2610 iv_contig = true;
2612 src_nents = src_nents ? : 1;
2613 sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
2614 sizeof(struct sec4_sg_entry);
2616 /* allocate space for base edesc and hw desc commands, link tables */
2617 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
2618 GFP_DMA | flags);
2619 if (!edesc) {
2620 dev_err(jrdev, "could not allocate extended descriptor\n");
2621 return ERR_PTR(-ENOMEM);
2622 }
2624 edesc->src_nents = src_nents;
2625 edesc->dst_nents = dst_nents;
2626 edesc->sec4_sg_bytes = sec4_sg_bytes;
2627 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
2628 desc_bytes;
2630 sec4_sg_index = 0;
2631 if (!iv_contig) {
2632 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
2633 sg_to_sec4_sg_last(req->src, src_nents,
2634 edesc->sec4_sg + 1, 0);
2635 sec4_sg_index += 1 + src_nents;
2636 }
2638 if (dst_nents) {
2639 sg_to_sec4_sg_last(req->dst, dst_nents,
2640 edesc->sec4_sg + sec4_sg_index, 0);
2641 }
2643 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2644 sec4_sg_bytes, DMA_TO_DEVICE);
2645 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2646 dev_err(jrdev, "unable to map S/G table\n");
2647 return ERR_PTR(-ENOMEM);
2650 edesc->iv_dma = iv_dma;
2653 print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
2654 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
2658 *iv_contig_out = iv_contig;
2659 return edesc;
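/*
 * Worked example (illustrative) for the contiguity test above: when the
 * source is one flat buffer (src_nents == 0) and the mapped IV ends
 * exactly where the source begins (iv_dma + ivsize ==
 * sg_dma_address(req->src)), the engine can consume IV-plus-data as one
 * run and no IV entry is needed. Otherwise the table costs
 * (1 + src_nents + dst_nents) entries, e.g. src_nents = 2, dst_nents = 3
 * gives 6 * sizeof(struct sec4_sg_entry) bytes.
 */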
2662 static int ablkcipher_encrypt(struct ablkcipher_request *req)
2664 struct ablkcipher_edesc *edesc;
2665 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2666 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2667 struct device *jrdev = ctx->jrdev;
2668 bool iv_contig;
2669 u32 *desc;
2670 int ret = 0;
2672 /* allocate extended descriptor */
2673 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
2674 CAAM_CMD_SZ, &iv_contig);
2675 if (IS_ERR(edesc))
2676 return PTR_ERR(edesc);
2678 /* Create and submit job descriptor */
2679 init_ablkcipher_job(ctx->sh_desc_enc,
2680 ctx->sh_desc_enc_dma, edesc, req, iv_contig);
2682 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
2683 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2684 desc_bytes(edesc->hw_desc), 1);
2686 desc = edesc->hw_desc;
2687 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
2689 if (!ret) {
2690 ret = -EINPROGRESS;
2691 } else {
2692 ablkcipher_unmap(jrdev, edesc, req);
2693 kfree(edesc);
2694 }
2696 return ret;
2699 static int ablkcipher_decrypt(struct ablkcipher_request *req)
2701 struct ablkcipher_edesc *edesc;
2702 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2703 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2704 struct device *jrdev = ctx->jrdev;
2705 bool iv_contig;
2706 u32 *desc;
2707 int ret = 0;
2709 /* allocate extended descriptor */
2710 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
2711 CAAM_CMD_SZ, &iv_contig);
2712 if (IS_ERR(edesc))
2713 return PTR_ERR(edesc);
2715 /* Create and submit job descriptor */
2716 init_ablkcipher_job(ctx->sh_desc_dec,
2717 ctx->sh_desc_dec_dma, edesc, req, iv_contig);
2718 desc = edesc->hw_desc;
2720 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
2721 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2722 desc_bytes(edesc->hw_desc), 1);
2725 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
2726 if (!ret) {
2727 ret = -EINPROGRESS;
2728 } else {
2729 ablkcipher_unmap(jrdev, edesc, req);
2730 kfree(edesc);
2731 }
2733 return ret;
2737 * allocate and map the ablkcipher extended descriptor
2738 * for ablkcipher givencrypt
2740 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
2741 struct skcipher_givcrypt_request *greq,
2742 int desc_bytes,
2743 bool *iv_contig_out)
2745 struct ablkcipher_request *req = &greq->creq;
2746 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2747 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2748 struct device *jrdev = ctx->jrdev;
2749 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2750 CRYPTO_TFM_REQ_MAY_SLEEP)) ?
2751 GFP_KERNEL : GFP_ATOMIC;
2752 int src_nents, dst_nents = 0, sec4_sg_bytes;
2753 struct ablkcipher_edesc *edesc;
2754 dma_addr_t iv_dma = 0;
2755 bool iv_contig = false;
2756 int sgc;
2757 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2758 int sec4_sg_index;
2760 src_nents = sg_count(req->src, req->nbytes);
2762 if (unlikely(req->dst != req->src))
2763 dst_nents = sg_count(req->dst, req->nbytes);
2765 if (likely(req->src == req->dst)) {
2766 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2767 DMA_BIDIRECTIONAL);
2768 } else {
2769 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2770 DMA_TO_DEVICE);
2771 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2772 DMA_FROM_DEVICE);
2773 }
2776 * Check if the IV can be contiguous with source and destination.
2777 * If so, include it. If not, create a scatterlist.
2779 iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
2780 if (dma_mapping_error(jrdev, iv_dma)) {
2781 dev_err(jrdev, "unable to map IV\n");
2782 return ERR_PTR(-ENOMEM);
2785 if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst))
2786 iv_contig = true;
2788 dst_nents = dst_nents ? : 1;
2789 sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
2790 sizeof(struct sec4_sg_entry);
2792 /* allocate space for base edesc and hw desc commands, link tables */
2793 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
2794 GFP_DMA | flags);
2795 if (!edesc) {
2796 dev_err(jrdev, "could not allocate extended descriptor\n");
2797 return ERR_PTR(-ENOMEM);
2798 }
2800 edesc->src_nents = src_nents;
2801 edesc->dst_nents = dst_nents;
2802 edesc->sec4_sg_bytes = sec4_sg_bytes;
2803 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
2804 desc_bytes;
2806 sec4_sg_index = 0;
2807 if (src_nents) {
2808 sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0);
2809 sec4_sg_index += src_nents;
2810 }
2812 if (!iv_contig) {
2813 dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
2814 iv_dma, ivsize, 0);
2815 sec4_sg_index += 1;
2816 sg_to_sec4_sg_last(req->dst, dst_nents,
2817 edesc->sec4_sg + sec4_sg_index, 0);
2818 }
2820 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2821 sec4_sg_bytes, DMA_TO_DEVICE);
2822 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2823 dev_err(jrdev, "unable to map S/G table\n");
2824 return ERR_PTR(-ENOMEM);
2826 edesc->iv_dma = iv_dma;
2829 print_hex_dump(KERN_ERR,
2830 "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
2831 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
2835 *iv_contig_out = iv_contig;
2836 return edesc;
2839 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
2841 struct ablkcipher_request *req = &creq->creq;
2842 struct ablkcipher_edesc *edesc;
2843 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2844 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2845 struct device *jrdev = ctx->jrdev;
2846 bool iv_contig;
2847 u32 *desc;
2848 int ret = 0;
2850 /* allocate extended descriptor */
2851 edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
2852 CAAM_CMD_SZ, &iv_contig);
2853 if (IS_ERR(edesc))
2854 return PTR_ERR(edesc);
2856 /* Create and submit job descriptor */
2857 init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
2858 edesc, req, iv_contig);
2860 print_hex_dump(KERN_ERR,
2861 "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
2862 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2863 desc_bytes(edesc->hw_desc), 1);
2865 desc = edesc->hw_desc;
2866 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
2868 if (!ret) {
2869 ret = -EINPROGRESS;
2870 } else {
2871 ablkcipher_unmap(jrdev, edesc, req);
2872 kfree(edesc);
2873 }
2875 return ret;
2878 #define template_aead template_u.aead
2879 #define template_ablkcipher template_u.ablkcipher
2880 struct caam_alg_template {
2881 char name[CRYPTO_MAX_ALG_NAME];
2882 char driver_name[CRYPTO_MAX_ALG_NAME];
2883 unsigned int blocksize;
2886 struct ablkcipher_alg ablkcipher;
2888 u32 class1_alg_type;
2889 u32 class2_alg_type;
2893 static struct caam_alg_template driver_algs[] = {
2894 /* ablkcipher descriptor */
2897 .driver_name = "cbc-aes-caam",
2898 .blocksize = AES_BLOCK_SIZE,
2899 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2900 .template_ablkcipher = {
2901 .setkey = ablkcipher_setkey,
2902 .encrypt = ablkcipher_encrypt,
2903 .decrypt = ablkcipher_decrypt,
2904 .givencrypt = ablkcipher_givencrypt,
2905 .geniv = "<built-in>",
2906 .min_keysize = AES_MIN_KEY_SIZE,
2907 .max_keysize = AES_MAX_KEY_SIZE,
2908 .ivsize = AES_BLOCK_SIZE,
2910 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2913 .name = "cbc(des3_ede)",
2914 .driver_name = "cbc-3des-caam",
2915 .blocksize = DES3_EDE_BLOCK_SIZE,
2916 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2917 .template_ablkcipher = {
2918 .setkey = ablkcipher_setkey,
2919 .encrypt = ablkcipher_encrypt,
2920 .decrypt = ablkcipher_decrypt,
2921 .givencrypt = ablkcipher_givencrypt,
2922 .geniv = "<built-in>",
2923 .min_keysize = DES3_EDE_KEY_SIZE,
2924 .max_keysize = DES3_EDE_KEY_SIZE,
2925 .ivsize = DES3_EDE_BLOCK_SIZE,
2927 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2931 .driver_name = "cbc-des-caam",
2932 .blocksize = DES_BLOCK_SIZE,
2933 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2934 .template_ablkcipher = {
2935 .setkey = ablkcipher_setkey,
2936 .encrypt = ablkcipher_encrypt,
2937 .decrypt = ablkcipher_decrypt,
2938 .givencrypt = ablkcipher_givencrypt,
2939 .geniv = "<built-in>",
2940 .min_keysize = DES_KEY_SIZE,
2941 .max_keysize = DES_KEY_SIZE,
2942 .ivsize = DES_BLOCK_SIZE,
2944 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2948 .driver_name = "ctr-aes-caam",
2950 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2951 .template_ablkcipher = {
2952 .setkey = ablkcipher_setkey,
2953 .encrypt = ablkcipher_encrypt,
2954 .decrypt = ablkcipher_decrypt,
2956 .min_keysize = AES_MIN_KEY_SIZE,
2957 .max_keysize = AES_MAX_KEY_SIZE,
2958 .ivsize = AES_BLOCK_SIZE,
2960 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
2963 .name = "rfc3686(ctr(aes))",
2964 .driver_name = "rfc3686-ctr-aes-caam",
2966 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2967 .template_ablkcipher = {
2968 .setkey = ablkcipher_setkey,
2969 .encrypt = ablkcipher_encrypt,
2970 .decrypt = ablkcipher_decrypt,
2971 .givencrypt = ablkcipher_givencrypt,
2972 .geniv = "<built-in>",
2973 .min_keysize = AES_MIN_KEY_SIZE +
2974 CTR_RFC3686_NONCE_SIZE,
2975 .max_keysize = AES_MAX_KEY_SIZE +
2976 CTR_RFC3686_NONCE_SIZE,
2977 .ivsize = CTR_RFC3686_IV_SIZE,
2979 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
2983 .driver_name = "xts-aes-caam",
2984 .blocksize = AES_BLOCK_SIZE,
2985 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2986 .template_ablkcipher = {
2987 .setkey = xts_ablkcipher_setkey,
2988 .encrypt = ablkcipher_encrypt,
2989 .decrypt = ablkcipher_decrypt,
2991 .min_keysize = 2 * AES_MIN_KEY_SIZE,
2992 .max_keysize = 2 * AES_MAX_KEY_SIZE,
2993 .ivsize = AES_BLOCK_SIZE,
2995 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
2999 static struct caam_aead_alg driver_aeads[] = {
3003 .cra_name = "rfc4106(gcm(aes))",
3004 .cra_driver_name = "rfc4106-gcm-aes-caam",
3007 .setkey = rfc4106_setkey,
3008 .setauthsize = rfc4106_setauthsize,
3009 .encrypt = ipsec_gcm_encrypt,
3010 .decrypt = ipsec_gcm_decrypt,
3011 .ivsize = 8,
3012 .maxauthsize = AES_BLOCK_SIZE,
3015 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3021 .cra_name = "rfc4543(gcm(aes))",
3022 .cra_driver_name = "rfc4543-gcm-aes-caam",
3025 .setkey = rfc4543_setkey,
3026 .setauthsize = rfc4543_setauthsize,
3027 .encrypt = ipsec_gcm_encrypt,
3028 .decrypt = ipsec_gcm_decrypt,
3029 .ivsize = 8,
3030 .maxauthsize = AES_BLOCK_SIZE,
3033 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3036 /* Galois Counter Mode */
3040 .cra_name = "gcm(aes)",
3041 .cra_driver_name = "gcm-aes-caam",
3044 .setkey = gcm_setkey,
3045 .setauthsize = gcm_setauthsize,
3046 .encrypt = gcm_encrypt,
3047 .decrypt = gcm_decrypt,
3048 .ivsize = 12,
3049 .maxauthsize = AES_BLOCK_SIZE,
3052 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3055 /* single-pass ipsec_esp descriptor */
3059 .cra_name = "authenc(hmac(md5),"
3060 "ecb(cipher_null))",
3061 .cra_driver_name = "authenc-hmac-md5-"
3062 "ecb-cipher_null-caam",
3063 .cra_blocksize = NULL_BLOCK_SIZE,
3065 .setkey = aead_setkey,
3066 .setauthsize = aead_setauthsize,
3067 .encrypt = aead_encrypt,
3068 .decrypt = aead_decrypt,
3069 .ivsize = NULL_IV_SIZE,
3070 .maxauthsize = MD5_DIGEST_SIZE,
3073 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3074 OP_ALG_AAI_HMAC_PRECOMP,
3075 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3081 .cra_name = "authenc(hmac(sha1),"
3082 "ecb(cipher_null))",
3083 .cra_driver_name = "authenc-hmac-sha1-"
3084 "ecb-cipher_null-caam",
3085 .cra_blocksize = NULL_BLOCK_SIZE,
3087 .setkey = aead_setkey,
3088 .setauthsize = aead_setauthsize,
3089 .encrypt = aead_encrypt,
3090 .decrypt = aead_decrypt,
3091 .ivsize = NULL_IV_SIZE,
3092 .maxauthsize = SHA1_DIGEST_SIZE,
3095 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3096 OP_ALG_AAI_HMAC_PRECOMP,
3097 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3103 .cra_name = "authenc(hmac(sha224),"
3104 "ecb(cipher_null))",
3105 .cra_driver_name = "authenc-hmac-sha224-"
3106 "ecb-cipher_null-caam",
3107 .cra_blocksize = NULL_BLOCK_SIZE,
3109 .setkey = aead_setkey,
3110 .setauthsize = aead_setauthsize,
3111 .encrypt = aead_encrypt,
3112 .decrypt = aead_decrypt,
3113 .ivsize = NULL_IV_SIZE,
3114 .maxauthsize = SHA224_DIGEST_SIZE,
3117 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3118 OP_ALG_AAI_HMAC_PRECOMP,
3119 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3125 .cra_name = "authenc(hmac(sha256),"
3126 "ecb(cipher_null))",
3127 .cra_driver_name = "authenc-hmac-sha256-"
3128 "ecb-cipher_null-caam",
3129 .cra_blocksize = NULL_BLOCK_SIZE,
3131 .setkey = aead_setkey,
3132 .setauthsize = aead_setauthsize,
3133 .encrypt = aead_encrypt,
3134 .decrypt = aead_decrypt,
3135 .ivsize = NULL_IV_SIZE,
3136 .maxauthsize = SHA256_DIGEST_SIZE,
3139 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3140 OP_ALG_AAI_HMAC_PRECOMP,
3141 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3147 .cra_name = "authenc(hmac(sha384),"
3148 "ecb(cipher_null))",
3149 .cra_driver_name = "authenc-hmac-sha384-"
3150 "ecb-cipher_null-caam",
3151 .cra_blocksize = NULL_BLOCK_SIZE,
3153 .setkey = aead_setkey,
3154 .setauthsize = aead_setauthsize,
3155 .encrypt = aead_encrypt,
3156 .decrypt = aead_decrypt,
3157 .ivsize = NULL_IV_SIZE,
3158 .maxauthsize = SHA384_DIGEST_SIZE,
3161 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3162 OP_ALG_AAI_HMAC_PRECOMP,
3163 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3169 .cra_name = "authenc(hmac(sha512),"
3170 "ecb(cipher_null))",
3171 .cra_driver_name = "authenc-hmac-sha512-"
3172 "ecb-cipher_null-caam",
3173 .cra_blocksize = NULL_BLOCK_SIZE,
3175 .setkey = aead_setkey,
3176 .setauthsize = aead_setauthsize,
3177 .encrypt = aead_encrypt,
3178 .decrypt = aead_decrypt,
3179 .ivsize = NULL_IV_SIZE,
3180 .maxauthsize = SHA512_DIGEST_SIZE,
3183 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3184 OP_ALG_AAI_HMAC_PRECOMP,
3185 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3191 .cra_name = "authenc(hmac(md5),cbc(aes))",
3192 .cra_driver_name = "authenc-hmac-md5-"
3194 .cra_blocksize = AES_BLOCK_SIZE,
3196 .setkey = aead_setkey,
3197 .setauthsize = aead_setauthsize,
3198 .encrypt = aead_encrypt,
3199 .decrypt = aead_decrypt,
3200 .ivsize = AES_BLOCK_SIZE,
3201 .maxauthsize = MD5_DIGEST_SIZE,
3204 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3205 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3206 OP_ALG_AAI_HMAC_PRECOMP,
3207 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3213 .cra_name = "echainiv(authenc(hmac(md5),"
3215 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3217 .cra_blocksize = AES_BLOCK_SIZE,
3219 .setkey = aead_setkey,
3220 .setauthsize = aead_setauthsize,
3221 .encrypt = aead_encrypt,
3222 .decrypt = aead_decrypt,
3223 .ivsize = AES_BLOCK_SIZE,
3224 .maxauthsize = MD5_DIGEST_SIZE,
3227 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3228 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3229 OP_ALG_AAI_HMAC_PRECOMP,
3230 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3237 .cra_name = "authenc(hmac(sha1),cbc(aes))",
3238 .cra_driver_name = "authenc-hmac-sha1-"
3240 .cra_blocksize = AES_BLOCK_SIZE,
3242 .setkey = aead_setkey,
3243 .setauthsize = aead_setauthsize,
3244 .encrypt = aead_encrypt,
3245 .decrypt = aead_decrypt,
3246 .ivsize = AES_BLOCK_SIZE,
3247 .maxauthsize = SHA1_DIGEST_SIZE,
3250 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3251 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3252 OP_ALG_AAI_HMAC_PRECOMP,
3253 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3259 .cra_name = "echainiv(authenc(hmac(sha1),"
3261 .cra_driver_name = "echainiv-authenc-"
3262 "hmac-sha1-cbc-aes-caam",
3263 .cra_blocksize = AES_BLOCK_SIZE,
3265 .setkey = aead_setkey,
3266 .setauthsize = aead_setauthsize,
3267 .encrypt = aead_encrypt,
3268 .decrypt = aead_decrypt,
3269 .ivsize = AES_BLOCK_SIZE,
3270 .maxauthsize = SHA1_DIGEST_SIZE,
3273 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3274 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3275 OP_ALG_AAI_HMAC_PRECOMP,
3276 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3283 .cra_name = "authenc(hmac(sha224),cbc(aes))",
3284 .cra_driver_name = "authenc-hmac-sha224-"
3286 .cra_blocksize = AES_BLOCK_SIZE,
3288 .setkey = aead_setkey,
3289 .setauthsize = aead_setauthsize,
3290 .encrypt = aead_encrypt,
3291 .decrypt = aead_decrypt,
3292 .ivsize = AES_BLOCK_SIZE,
3293 .maxauthsize = SHA224_DIGEST_SIZE,
3296 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3297 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3298 OP_ALG_AAI_HMAC_PRECOMP,
3299 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3305 .cra_name = "echainiv(authenc(hmac(sha224),"
3307 .cra_driver_name = "echainiv-authenc-"
3308 "hmac-sha224-cbc-aes-caam",
3309 .cra_blocksize = AES_BLOCK_SIZE,
3311 .setkey = aead_setkey,
3312 .setauthsize = aead_setauthsize,
3313 .encrypt = aead_encrypt,
3314 .decrypt = aead_decrypt,
3315 .ivsize = AES_BLOCK_SIZE,
3316 .maxauthsize = SHA224_DIGEST_SIZE,
3319 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3320 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3321 OP_ALG_AAI_HMAC_PRECOMP,
3322 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3329 .cra_name = "authenc(hmac(sha256),cbc(aes))",
3330 .cra_driver_name = "authenc-hmac-sha256-"
3332 .cra_blocksize = AES_BLOCK_SIZE,
3334 .setkey = aead_setkey,
3335 .setauthsize = aead_setauthsize,
3336 .encrypt = aead_encrypt,
3337 .decrypt = aead_decrypt,
3338 .ivsize = AES_BLOCK_SIZE,
3339 .maxauthsize = SHA256_DIGEST_SIZE,
3342 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3343 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3344 OP_ALG_AAI_HMAC_PRECOMP,
3345 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3351 .cra_name = "echainiv(authenc(hmac(sha256),"
3353 .cra_driver_name = "echainiv-authenc-"
3354 "hmac-sha256-cbc-aes-caam",
3355 .cra_blocksize = AES_BLOCK_SIZE,
3357 .setkey = aead_setkey,
3358 .setauthsize = aead_setauthsize,
3359 .encrypt = aead_encrypt,
3360 .decrypt = aead_decrypt,
3361 .ivsize = AES_BLOCK_SIZE,
3362 .maxauthsize = SHA256_DIGEST_SIZE,
3365 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3366 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3367 OP_ALG_AAI_HMAC_PRECOMP,
3368 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3375 .cra_name = "authenc(hmac(sha384),cbc(aes))",
3376 .cra_driver_name = "authenc-hmac-sha384-"
3378 .cra_blocksize = AES_BLOCK_SIZE,
3380 .setkey = aead_setkey,
3381 .setauthsize = aead_setauthsize,
3382 .encrypt = aead_encrypt,
3383 .decrypt = aead_decrypt,
3384 .ivsize = AES_BLOCK_SIZE,
3385 .maxauthsize = SHA384_DIGEST_SIZE,
3388 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3389 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3390 OP_ALG_AAI_HMAC_PRECOMP,
3391 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3397 .cra_name = "echainiv(authenc(hmac(sha384),"
3399 .cra_driver_name = "echainiv-authenc-"
3400 "hmac-sha384-cbc-aes-caam",
3401 .cra_blocksize = AES_BLOCK_SIZE,
3403 .setkey = aead_setkey,
3404 .setauthsize = aead_setauthsize,
3405 .encrypt = aead_encrypt,
3406 .decrypt = aead_decrypt,
3407 .ivsize = AES_BLOCK_SIZE,
3408 .maxauthsize = SHA384_DIGEST_SIZE,
3411 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3412 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3413 OP_ALG_AAI_HMAC_PRECOMP,
3414 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3421 .cra_name = "authenc(hmac(sha512),cbc(aes))",
3422 .cra_driver_name = "authenc-hmac-sha512-"
3424 .cra_blocksize = AES_BLOCK_SIZE,
3426 .setkey = aead_setkey,
3427 .setauthsize = aead_setauthsize,
3428 .encrypt = aead_encrypt,
3429 .decrypt = aead_decrypt,
3430 .ivsize = AES_BLOCK_SIZE,
3431 .maxauthsize = SHA512_DIGEST_SIZE,
3434 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3435 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3436 OP_ALG_AAI_HMAC_PRECOMP,
3437 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3443 .cra_name = "echainiv(authenc(hmac(sha512),"
3445 .cra_driver_name = "echainiv-authenc-"
3446 "hmac-sha512-cbc-aes-caam",
3447 .cra_blocksize = AES_BLOCK_SIZE,
3449 .setkey = aead_setkey,
3450 .setauthsize = aead_setauthsize,
3451 .encrypt = aead_encrypt,
3452 .decrypt = aead_decrypt,
3453 .ivsize = AES_BLOCK_SIZE,
3454 .maxauthsize = SHA512_DIGEST_SIZE,
3457 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3458 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3459 OP_ALG_AAI_HMAC_PRECOMP,
3460 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3467 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
3468 .cra_driver_name = "authenc-hmac-md5-"
3469 "cbc-des3_ede-caam",
3470 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3472 .setkey = aead_setkey,
3473 .setauthsize = aead_setauthsize,
3474 .encrypt = aead_encrypt,
3475 .decrypt = aead_decrypt,
3476 .ivsize = DES3_EDE_BLOCK_SIZE,
3477 .maxauthsize = MD5_DIGEST_SIZE,
3480 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3481 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3482 OP_ALG_AAI_HMAC_PRECOMP,
3483 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3489 .cra_name = "echainiv(authenc(hmac(md5),"
3491 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3492 "cbc-des3_ede-caam",
3493 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3495 .setkey = aead_setkey,
3496 .setauthsize = aead_setauthsize,
3497 .encrypt = aead_encrypt,
3498 .decrypt = aead_decrypt,
3499 .ivsize = DES3_EDE_BLOCK_SIZE,
3500 .maxauthsize = MD5_DIGEST_SIZE,
3503 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3504 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3505 OP_ALG_AAI_HMAC_PRECOMP,
3506 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3513 .cra_name = "authenc(hmac(sha1),"
3515 .cra_driver_name = "authenc-hmac-sha1-"
3516 "cbc-des3_ede-caam",
3517 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3519 .setkey = aead_setkey,
3520 .setauthsize = aead_setauthsize,
3521 .encrypt = aead_encrypt,
3522 .decrypt = aead_decrypt,
3523 .ivsize = DES3_EDE_BLOCK_SIZE,
3524 .maxauthsize = SHA1_DIGEST_SIZE,
3527 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3528 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3529 OP_ALG_AAI_HMAC_PRECOMP,
3530 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3536 .cra_name = "echainiv(authenc(hmac(sha1),"
3538 .cra_driver_name = "echainiv-authenc-"
3540 "cbc-des3_ede-caam",
3541 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3543 .setkey = aead_setkey,
3544 .setauthsize = aead_setauthsize,
3545 .encrypt = aead_encrypt,
3546 .decrypt = aead_decrypt,
3547 .ivsize = DES3_EDE_BLOCK_SIZE,
3548 .maxauthsize = SHA1_DIGEST_SIZE,
3551 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3552 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3553 OP_ALG_AAI_HMAC_PRECOMP,
3554 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3561 .cra_name = "authenc(hmac(sha224),"
3563 .cra_driver_name = "authenc-hmac-sha224-"
3564 "cbc-des3_ede-caam",
3565 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3567 .setkey = aead_setkey,
3568 .setauthsize = aead_setauthsize,
3569 .encrypt = aead_encrypt,
3570 .decrypt = aead_decrypt,
3571 .ivsize = DES3_EDE_BLOCK_SIZE,
3572 .maxauthsize = SHA224_DIGEST_SIZE,
3575 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3576 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3577 OP_ALG_AAI_HMAC_PRECOMP,
3578 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3584 .cra_name = "echainiv(authenc(hmac(sha224),"
3586 .cra_driver_name = "echainiv-authenc-"
3588 "cbc-des3_ede-caam",
3589 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3591 .setkey = aead_setkey,
3592 .setauthsize = aead_setauthsize,
3593 .encrypt = aead_encrypt,
3594 .decrypt = aead_decrypt,
3595 .ivsize = DES3_EDE_BLOCK_SIZE,
3596 .maxauthsize = SHA224_DIGEST_SIZE,
3599 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3600 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3601 OP_ALG_AAI_HMAC_PRECOMP,
3602 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3609 .cra_name = "authenc(hmac(sha256),"
3611 .cra_driver_name = "authenc-hmac-sha256-"
3612 "cbc-des3_ede-caam",
3613 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3615 .setkey = aead_setkey,
3616 .setauthsize = aead_setauthsize,
3617 .encrypt = aead_encrypt,
3618 .decrypt = aead_decrypt,
3619 .ivsize = DES3_EDE_BLOCK_SIZE,
3620 .maxauthsize = SHA256_DIGEST_SIZE,
3623 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3624 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3625 OP_ALG_AAI_HMAC_PRECOMP,
3626 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3632 .cra_name = "echainiv(authenc(hmac(sha256),"
3634 .cra_driver_name = "echainiv-authenc-"
3636 "cbc-des3_ede-caam",
3637 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3639 .setkey = aead_setkey,
3640 .setauthsize = aead_setauthsize,
3641 .encrypt = aead_encrypt,
3642 .decrypt = aead_decrypt,
3643 .ivsize = DES3_EDE_BLOCK_SIZE,
3644 .maxauthsize = SHA256_DIGEST_SIZE,
3647 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3648 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3649 OP_ALG_AAI_HMAC_PRECOMP,
3650 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3657 .cra_name = "authenc(hmac(sha384),"
3659 .cra_driver_name = "authenc-hmac-sha384-"
3660 "cbc-des3_ede-caam",
3661 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3663 .setkey = aead_setkey,
3664 .setauthsize = aead_setauthsize,
3665 .encrypt = aead_encrypt,
3666 .decrypt = aead_decrypt,
3667 .ivsize = DES3_EDE_BLOCK_SIZE,
3668 .maxauthsize = SHA384_DIGEST_SIZE,
3671 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3672 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3673 OP_ALG_AAI_HMAC_PRECOMP,
3674 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3680 .cra_name = "echainiv(authenc(hmac(sha384),"
3682 .cra_driver_name = "echainiv-authenc-"
3684 "cbc-des3_ede-caam",
3685 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3687 .setkey = aead_setkey,
3688 .setauthsize = aead_setauthsize,
3689 .encrypt = aead_encrypt,
3690 .decrypt = aead_decrypt,
3691 .ivsize = DES3_EDE_BLOCK_SIZE,
3692 .maxauthsize = SHA384_DIGEST_SIZE,
3695 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3696 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3697 OP_ALG_AAI_HMAC_PRECOMP,
3698 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3705 .cra_name = "authenc(hmac(sha512),"
3707 .cra_driver_name = "authenc-hmac-sha512-"
3708 "cbc-des3_ede-caam",
3709 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3711 .setkey = aead_setkey,
3712 .setauthsize = aead_setauthsize,
3713 .encrypt = aead_encrypt,
3714 .decrypt = aead_decrypt,
3715 .ivsize = DES3_EDE_BLOCK_SIZE,
3716 .maxauthsize = SHA512_DIGEST_SIZE,
3719 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3720 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3721 OP_ALG_AAI_HMAC_PRECOMP,
3722 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3728 .cra_name = "echainiv(authenc(hmac(sha512),"
3730 .cra_driver_name = "echainiv-authenc-"
3732 "cbc-des3_ede-caam",
3733 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3735 .setkey = aead_setkey,
3736 .setauthsize = aead_setauthsize,
3737 .encrypt = aead_encrypt,
3738 .decrypt = aead_decrypt,
3739 .ivsize = DES3_EDE_BLOCK_SIZE,
3740 .maxauthsize = SHA512_DIGEST_SIZE,
3743 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3744 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3745 OP_ALG_AAI_HMAC_PRECOMP,
3746 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3753 .cra_name = "authenc(hmac(md5),cbc(des))",
3754 .cra_driver_name = "authenc-hmac-md5-"
3756 .cra_blocksize = DES_BLOCK_SIZE,
3758 .setkey = aead_setkey,
3759 .setauthsize = aead_setauthsize,
3760 .encrypt = aead_encrypt,
3761 .decrypt = aead_decrypt,
3762 .ivsize = DES_BLOCK_SIZE,
3763 .maxauthsize = MD5_DIGEST_SIZE,
3766 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3767 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3768 OP_ALG_AAI_HMAC_PRECOMP,
3769 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3775 .cra_name = "echainiv(authenc(hmac(md5),"
3777 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3779 .cra_blocksize = DES_BLOCK_SIZE,
3781 .setkey = aead_setkey,
3782 .setauthsize = aead_setauthsize,
3783 .encrypt = aead_encrypt,
3784 .decrypt = aead_decrypt,
3785 .ivsize = DES_BLOCK_SIZE,
3786 .maxauthsize = MD5_DIGEST_SIZE,
3789 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3790 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3791 OP_ALG_AAI_HMAC_PRECOMP,
3792 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3799 .cra_name = "authenc(hmac(sha1),cbc(des))",
3800 .cra_driver_name = "authenc-hmac-sha1-"
3802 .cra_blocksize = DES_BLOCK_SIZE,
3804 .setkey = aead_setkey,
3805 .setauthsize = aead_setauthsize,
3806 .encrypt = aead_encrypt,
3807 .decrypt = aead_decrypt,
3808 .ivsize = DES_BLOCK_SIZE,
3809 .maxauthsize = SHA1_DIGEST_SIZE,
3812 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3813 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3814 OP_ALG_AAI_HMAC_PRECOMP,
3815 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3821 .cra_name = "echainiv(authenc(hmac(sha1),"
3823 .cra_driver_name = "echainiv-authenc-"
3824 "hmac-sha1-cbc-des-caam",
3825 .cra_blocksize = DES_BLOCK_SIZE,
3827 .setkey = aead_setkey,
3828 .setauthsize = aead_setauthsize,
3829 .encrypt = aead_encrypt,
3830 .decrypt = aead_decrypt,
3831 .ivsize = DES_BLOCK_SIZE,
3832 .maxauthsize = SHA1_DIGEST_SIZE,
3835 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3836 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3837 OP_ALG_AAI_HMAC_PRECOMP,
3838 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3845 .cra_name = "authenc(hmac(sha224),cbc(des))",
3846 .cra_driver_name = "authenc-hmac-sha224-"
3848 .cra_blocksize = DES_BLOCK_SIZE,
3850 .setkey = aead_setkey,
3851 .setauthsize = aead_setauthsize,
3852 .encrypt = aead_encrypt,
3853 .decrypt = aead_decrypt,
3854 .ivsize = DES_BLOCK_SIZE,
3855 .maxauthsize = SHA224_DIGEST_SIZE,
3858 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3859 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3860 OP_ALG_AAI_HMAC_PRECOMP,
3861 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3867 .cra_name = "echainiv(authenc(hmac(sha224),"
3869 .cra_driver_name = "echainiv-authenc-"
3870 "hmac-sha224-cbc-des-caam",
3871 .cra_blocksize = DES_BLOCK_SIZE,
3873 .setkey = aead_setkey,
3874 .setauthsize = aead_setauthsize,
3875 .encrypt = aead_encrypt,
3876 .decrypt = aead_decrypt,
3877 .ivsize = DES_BLOCK_SIZE,
3878 .maxauthsize = SHA224_DIGEST_SIZE,
3881 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3882 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3883 OP_ALG_AAI_HMAC_PRECOMP,
3884 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3891 .cra_name = "authenc(hmac(sha256),cbc(des))",
3892 .cra_driver_name = "authenc-hmac-sha256-"
3894 .cra_blocksize = DES_BLOCK_SIZE,
3896 .setkey = aead_setkey,
3897 .setauthsize = aead_setauthsize,
3898 .encrypt = aead_encrypt,
3899 .decrypt = aead_decrypt,
3900 .ivsize = DES_BLOCK_SIZE,
3901 .maxauthsize = SHA256_DIGEST_SIZE,
3904 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3905 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3906 OP_ALG_AAI_HMAC_PRECOMP,
3907 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3913 .cra_name = "echainiv(authenc(hmac(sha256),"
3915 .cra_driver_name = "echainiv-authenc-"
3916 "hmac-sha256-cbc-des-caam",
3917 .cra_blocksize = DES_BLOCK_SIZE,
3919 .setkey = aead_setkey,
3920 .setauthsize = aead_setauthsize,
3921 .encrypt = aead_encrypt,
3922 .decrypt = aead_decrypt,
3923 .ivsize = DES_BLOCK_SIZE,
3924 .maxauthsize = SHA256_DIGEST_SIZE,
3927 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3928 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3929 OP_ALG_AAI_HMAC_PRECOMP,
3930 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3937 .cra_name = "authenc(hmac(sha384),cbc(des))",
3938 .cra_driver_name = "authenc-hmac-sha384-"
3940 .cra_blocksize = DES_BLOCK_SIZE,
3942 .setkey = aead_setkey,
3943 .setauthsize = aead_setauthsize,
3944 .encrypt = aead_encrypt,
3945 .decrypt = aead_decrypt,
3946 .ivsize = DES_BLOCK_SIZE,
3947 .maxauthsize = SHA384_DIGEST_SIZE,
3950 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3951 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3952 OP_ALG_AAI_HMAC_PRECOMP,
3953 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3959 .cra_name = "echainiv(authenc(hmac(sha384),"
3961 .cra_driver_name = "echainiv-authenc-"
3962 "hmac-sha384-cbc-des-caam",
3963 .cra_blocksize = DES_BLOCK_SIZE,
3965 .setkey = aead_setkey,
3966 .setauthsize = aead_setauthsize,
3967 .encrypt = aead_encrypt,
3968 .decrypt = aead_decrypt,
3969 .ivsize = DES_BLOCK_SIZE,
3970 .maxauthsize = SHA384_DIGEST_SIZE,
3973 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3974 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3975 OP_ALG_AAI_HMAC_PRECOMP,
3976 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3983 .cra_name = "authenc(hmac(sha512),cbc(des))",
3984 .cra_driver_name = "authenc-hmac-sha512-"
3986 .cra_blocksize = DES_BLOCK_SIZE,
3988 .setkey = aead_setkey,
3989 .setauthsize = aead_setauthsize,
3990 .encrypt = aead_encrypt,
3991 .decrypt = aead_decrypt,
3992 .ivsize = DES_BLOCK_SIZE,
3993 .maxauthsize = SHA512_DIGEST_SIZE,
3996 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3997 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3998 OP_ALG_AAI_HMAC_PRECOMP,
3999 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4005 .cra_name = "echainiv(authenc(hmac(sha512),"
4007 .cra_driver_name = "echainiv-authenc-"
4008 "hmac-sha512-cbc-des-caam",
4009 .cra_blocksize = DES_BLOCK_SIZE,
4011 .setkey = aead_setkey,
4012 .setauthsize = aead_setauthsize,
4013 .encrypt = aead_encrypt,
4014 .decrypt = aead_decrypt,
4015 .ivsize = DES_BLOCK_SIZE,
4016 .maxauthsize = SHA512_DIGEST_SIZE,
4019 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4020 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4021 OP_ALG_AAI_HMAC_PRECOMP,
4022 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4029 .cra_name = "authenc(hmac(md5),"
4030 "rfc3686(ctr(aes)))",
4031 .cra_driver_name = "authenc-hmac-md5-"
4032 "rfc3686-ctr-aes-caam",
4035 .setkey = aead_setkey,
4036 .setauthsize = aead_setauthsize,
4037 .encrypt = aead_encrypt,
4038 .decrypt = aead_decrypt,
4039 .ivsize = CTR_RFC3686_IV_SIZE,
4040 .maxauthsize = MD5_DIGEST_SIZE,
4043 .class1_alg_type = OP_ALG_ALGSEL_AES |
4044 OP_ALG_AAI_CTR_MOD128,
4045 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4046 OP_ALG_AAI_HMAC_PRECOMP,
4047 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4054 .cra_name = "seqiv(authenc("
4055 "hmac(md5),rfc3686(ctr(aes))))",
4056 .cra_driver_name = "seqiv-authenc-hmac-md5-"
4057 "rfc3686-ctr-aes-caam",
4060 .setkey = aead_setkey,
4061 .setauthsize = aead_setauthsize,
4062 .encrypt = aead_encrypt,
4063 .decrypt = aead_decrypt,
4064 .ivsize = CTR_RFC3686_IV_SIZE,
4065 .maxauthsize = MD5_DIGEST_SIZE,
4068 .class1_alg_type = OP_ALG_ALGSEL_AES |
4069 OP_ALG_AAI_CTR_MOD128,
4070 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4071 OP_ALG_AAI_HMAC_PRECOMP,
4072 .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4080 .cra_name = "authenc(hmac(sha1),"
4081 "rfc3686(ctr(aes)))",
4082 .cra_driver_name = "authenc-hmac-sha1-"
4083 "rfc3686-ctr-aes-caam",
4086 .setkey = aead_setkey,
4087 .setauthsize = aead_setauthsize,
4088 .encrypt = aead_encrypt,
4089 .decrypt = aead_decrypt,
4090 .ivsize = CTR_RFC3686_IV_SIZE,
4091 .maxauthsize = SHA1_DIGEST_SIZE,
4094 .class1_alg_type = OP_ALG_ALGSEL_AES |
4095 OP_ALG_AAI_CTR_MOD128,
4096 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4097 OP_ALG_AAI_HMAC_PRECOMP,
4098 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4105 .cra_name = "seqiv(authenc("
4106 "hmac(sha1),rfc3686(ctr(aes))))",
4107 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
4108 "rfc3686-ctr-aes-caam",
4111 .setkey = aead_setkey,
4112 .setauthsize = aead_setauthsize,
4113 .encrypt = aead_encrypt,
4114 .decrypt = aead_decrypt,
4115 .ivsize = CTR_RFC3686_IV_SIZE,
4116 .maxauthsize = SHA1_DIGEST_SIZE,
4119 .class1_alg_type = OP_ALG_ALGSEL_AES |
4120 OP_ALG_AAI_CTR_MOD128,
4121 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4122 OP_ALG_AAI_HMAC_PRECOMP,
4123 .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4131 .cra_name = "authenc(hmac(sha224),"
4132 "rfc3686(ctr(aes)))",
4133 .cra_driver_name = "authenc-hmac-sha224-"
4134 "rfc3686-ctr-aes-caam",
4137 .setkey = aead_setkey,
4138 .setauthsize = aead_setauthsize,
4139 .encrypt = aead_encrypt,
4140 .decrypt = aead_decrypt,
4141 .ivsize = CTR_RFC3686_IV_SIZE,
4142 .maxauthsize = SHA224_DIGEST_SIZE,
4145 .class1_alg_type = OP_ALG_ALGSEL_AES |
4146 OP_ALG_AAI_CTR_MOD128,
4147 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4148 OP_ALG_AAI_HMAC_PRECOMP,
4149 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4156 .cra_name = "seqiv(authenc("
4157 "hmac(sha224),rfc3686(ctr(aes))))",
4158 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
4159 "rfc3686-ctr-aes-caam",
4162 .setkey = aead_setkey,
4163 .setauthsize = aead_setauthsize,
4164 .encrypt = aead_encrypt,
4165 .decrypt = aead_decrypt,
4166 .ivsize = CTR_RFC3686_IV_SIZE,
4167 .maxauthsize = SHA224_DIGEST_SIZE,
4170 .class1_alg_type = OP_ALG_ALGSEL_AES |
4171 OP_ALG_AAI_CTR_MOD128,
4172 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4173 OP_ALG_AAI_HMAC_PRECOMP,
4174 .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4182 .cra_name = "authenc(hmac(sha256),"
4183 "rfc3686(ctr(aes)))",
4184 .cra_driver_name = "authenc-hmac-sha256-"
4185 "rfc3686-ctr-aes-caam",
4188 .setkey = aead_setkey,
4189 .setauthsize = aead_setauthsize,
4190 .encrypt = aead_encrypt,
4191 .decrypt = aead_decrypt,
4192 .ivsize = CTR_RFC3686_IV_SIZE,
4193 .maxauthsize = SHA256_DIGEST_SIZE,
4196 .class1_alg_type = OP_ALG_ALGSEL_AES |
4197 OP_ALG_AAI_CTR_MOD128,
4198 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4199 OP_ALG_AAI_HMAC_PRECOMP,
4200 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4207 .cra_name = "seqiv(authenc(hmac(sha256),"
4208 "rfc3686(ctr(aes))))",
4209 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
4210 "rfc3686-ctr-aes-caam",
4213 .setkey = aead_setkey,
4214 .setauthsize = aead_setauthsize,
4215 .encrypt = aead_encrypt,
4216 .decrypt = aead_decrypt,
4217 .ivsize = CTR_RFC3686_IV_SIZE,
4218 .maxauthsize = SHA256_DIGEST_SIZE,
4221 .class1_alg_type = OP_ALG_ALGSEL_AES |
4222 OP_ALG_AAI_CTR_MOD128,
4223 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4224 OP_ALG_AAI_HMAC_PRECOMP,
4225 .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4233 .cra_name = "authenc(hmac(sha384),"
4234 "rfc3686(ctr(aes)))",
4235 .cra_driver_name = "authenc-hmac-sha384-"
4236 "rfc3686-ctr-aes-caam",
4239 .setkey = aead_setkey,
4240 .setauthsize = aead_setauthsize,
4241 .encrypt = aead_encrypt,
4242 .decrypt = aead_decrypt,
4243 .ivsize = CTR_RFC3686_IV_SIZE,
4244 .maxauthsize = SHA384_DIGEST_SIZE,
4247 .class1_alg_type = OP_ALG_ALGSEL_AES |
4248 OP_ALG_AAI_CTR_MOD128,
4249 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4250 OP_ALG_AAI_HMAC_PRECOMP,
4251 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4258 .cra_name = "seqiv(authenc(hmac(sha384),"
4259 "rfc3686(ctr(aes))))",
4260 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
4261 "rfc3686-ctr-aes-caam",
4264 .setkey = aead_setkey,
4265 .setauthsize = aead_setauthsize,
4266 .encrypt = aead_encrypt,
4267 .decrypt = aead_decrypt,
4268 .ivsize = CTR_RFC3686_IV_SIZE,
4269 .maxauthsize = SHA384_DIGEST_SIZE,
4272 .class1_alg_type = OP_ALG_ALGSEL_AES |
4273 OP_ALG_AAI_CTR_MOD128,
4274 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4275 OP_ALG_AAI_HMAC_PRECOMP,
4276 .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4284 .cra_name = "authenc(hmac(sha512),"
4285 "rfc3686(ctr(aes)))",
4286 .cra_driver_name = "authenc-hmac-sha512-"
4287 "rfc3686-ctr-aes-caam",
4290 .setkey = aead_setkey,
4291 .setauthsize = aead_setauthsize,
4292 .encrypt = aead_encrypt,
4293 .decrypt = aead_decrypt,
4294 .ivsize = CTR_RFC3686_IV_SIZE,
4295 .maxauthsize = SHA512_DIGEST_SIZE,
4298 .class1_alg_type = OP_ALG_ALGSEL_AES |
4299 OP_ALG_AAI_CTR_MOD128,
4300 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4301 OP_ALG_AAI_HMAC_PRECOMP,
4302 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4309 .cra_name = "seqiv(authenc(hmac(sha512),"
4310 "rfc3686(ctr(aes))))",
4311 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
4312 "rfc3686-ctr-aes-caam",
4315 .setkey = aead_setkey,
4316 .setauthsize = aead_setauthsize,
4317 .encrypt = aead_encrypt,
4318 .decrypt = aead_decrypt,
4319 .ivsize = CTR_RFC3686_IV_SIZE,
4320 .maxauthsize = SHA512_DIGEST_SIZE,
4323 .class1_alg_type = OP_ALG_ALGSEL_AES |
4324 OP_ALG_AAI_CTR_MOD128,
4325 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4326 OP_ALG_AAI_HMAC_PRECOMP,
4327 .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
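/*
 * Illustrative usage (not part of the driver): once the tables above are
 * registered, consumers reach them through the generic API by cra_name;
 * CAAM_CRA_PRIORITY (3000) makes the "-caam" implementations win over
 * lower-priority software fallbacks. The function name is hypothetical.
 */
static struct crypto_aead *caam_demo_alloc_gcm(void)
{
	/* resolves to "gcm-aes-caam" while this driver is loaded */
	return crypto_alloc_aead("gcm(aes)", 0, 0);
}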
4334 struct caam_crypto_alg {
4335 struct crypto_alg crypto_alg;
4336 struct list_head entry;
4337 struct caam_alg_entry caam;
4340 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
4342 ctx->jrdev = caam_jr_alloc();
4343 if (IS_ERR(ctx->jrdev)) {
4344 pr_err("Job Ring Device allocation for transform failed\n");
4345 return PTR_ERR(ctx->jrdev);
4348 /* copy descriptor header template value */
4349 ctx->class1_alg_type = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
4350 ctx->class2_alg_type = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
4351 ctx->alg_op = OP_TYPE_CLASS2_ALG | caam->alg_op;
4353 return 0;
4356 static int caam_cra_init(struct crypto_tfm *tfm)
4358 struct crypto_alg *alg = tfm->__crt_alg;
4359 struct caam_crypto_alg *caam_alg =
4360 container_of(alg, struct caam_crypto_alg, crypto_alg);
4361 struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
4363 return caam_init_common(ctx, &caam_alg->caam);
4366 static int caam_aead_init(struct crypto_aead *tfm)
4368 struct aead_alg *alg = crypto_aead_alg(tfm);
4369 struct caam_aead_alg *caam_alg =
4370 container_of(alg, struct caam_aead_alg, aead);
4371 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
4373 return caam_init_common(ctx, &caam_alg->caam);
4376 static void caam_exit_common(struct caam_ctx *ctx)
4378 if (ctx->sh_desc_enc_dma &&
4379 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
4380 dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
4381 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
4382 if (ctx->sh_desc_dec_dma &&
4383 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma))
4384 dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma,
4385 desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE);
4386 if (ctx->sh_desc_givenc_dma &&
4387 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma))
4388 dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma,
4389 desc_bytes(ctx->sh_desc_givenc),
4390 DMA_TO_DEVICE);
4391 if (ctx->key_dma &&
4392 !dma_mapping_error(ctx->jrdev, ctx->key_dma))
4393 dma_unmap_single(ctx->jrdev, ctx->key_dma,
4394 ctx->enckeylen + ctx->split_key_pad_len,
4395 DMA_TO_DEVICE);
4397 caam_jr_free(ctx->jrdev);
4400 static void caam_cra_exit(struct crypto_tfm *tfm)
4402 caam_exit_common(crypto_tfm_ctx(tfm));
4405 static void caam_aead_exit(struct crypto_aead *tfm)
4407 caam_exit_common(crypto_aead_ctx(tfm));
4410 static void __exit caam_algapi_exit(void)
4413 struct caam_crypto_alg *t_alg, *n;
4414 int i;
4416 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
4417 struct caam_aead_alg *t_alg = driver_aeads + i;
4419 if (t_alg->registered)
4420 crypto_unregister_aead(&t_alg->aead);
4423 if (!alg_list.next)
4424 return;
4426 list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
4427 crypto_unregister_alg(&t_alg->crypto_alg);
4428 list_del(&t_alg->entry);
4429 kfree(t_alg);
4430 }
4433 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
4434 *template)
4436 struct caam_crypto_alg *t_alg;
4437 struct crypto_alg *alg;
4439 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
4440 if (!t_alg) {
4441 pr_err("failed to allocate t_alg\n");
4442 return ERR_PTR(-ENOMEM);
4445 alg = &t_alg->crypto_alg;
4447 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
4448 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
4449 template->driver_name);
4450 alg->cra_module = THIS_MODULE;
4451 alg->cra_init = caam_cra_init;
4452 alg->cra_exit = caam_cra_exit;
4453 alg->cra_priority = CAAM_CRA_PRIORITY;
4454 alg->cra_blocksize = template->blocksize;
4455 alg->cra_alignmask = 0;
4456 alg->cra_ctxsize = sizeof(struct caam_ctx);
4457 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
4459 switch (template->type) {
4460 case CRYPTO_ALG_TYPE_GIVCIPHER:
4461 alg->cra_type = &crypto_givcipher_type;
4462 alg->cra_ablkcipher = template->template_ablkcipher;
4464 case CRYPTO_ALG_TYPE_ABLKCIPHER:
4465 alg->cra_type = &crypto_ablkcipher_type;
4466 alg->cra_ablkcipher = template->template_ablkcipher;
4470 t_alg->caam.class1_alg_type = template->class1_alg_type;
4471 t_alg->caam.class2_alg_type = template->class2_alg_type;
4472 t_alg->caam.alg_op = template->alg_op;
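
/* AEAD entries are static, so only the common base fields need filling in */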
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
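
/*
 * Module load: locate the CAAM controller, read out which CHAs (DES, AES,
 * MD) it instantiates, and register only the algorithms the hardware can
 * actually run.
 */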
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;
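
	/*
	 * For example (assuming an LP256 MD block): with md_limit clamped to
	 * SHA256_DIGEST_SIZE, the hmac(sha384)/hmac(sha512) AEAD templates
	 * fail the maxauthsize check in the second loop below and are
	 * skipped.
	 */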
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
			    OP_ALG_AAI_XTS)
				continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n", alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");