2 * Shared descriptors for aead, ablkcipher algorithms
8 #include "desc_constr.h"
9 #include "caamalg_desc.h"
12 * For aead functions, read payload and write payload,
13 * both of which are specified in req->src and req->dst
/*
 * aead_append_src_dst - append the payload transfer commands for AEAD:
 * a variable-length SEQ FIFO STORE of message data, then a SEQ FIFO LOAD
 * that feeds the input to BOTH classes (FIFOLD_CLASS_BOTH) and marks it
 * as the last data for both (FIFOLD_TYPE_LASTBOTH); msg_type selects the
 * FIFO load data type (e.g. MSG, MSG1OUT2).
 * NOTE(review): source appears elided here — function braces not visible.
 */
15 static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
17 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
18 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
19 KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
22 /* Set DK bit in class 1 operation if shared */
/*
 * append_dec_op1 - append the class 1 decrypt OPERATION command.
 * For non-AES algorithms a plain decrypt OPERATION is emitted; for AES,
 * descriptor-sharing is tested at runtime (JUMP_COND_SHRD) and the
 * OP_ALG_AAI_DK (Decrypt Key) variant is selected when shared.
 */
23 static inline void append_dec_op1(u32 *desc, u32 type)
25 u32 *jump_cmd, *uncond_jump_cmd;
27 /* DK bit is valid only for AES */
28 if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
29 append_operation(desc, type | OP_ALG_AS_INITFINAL |
/* NOTE(review): source appears elided here — the tail of the non-AES
 * OPERATION call and the early return are not visible. */
34 jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
/* Not-shared path: plain decrypt operation, then skip the DK variant */
35 append_operation(desc, type | OP_ALG_AS_INITFINAL |
37 uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
38 set_jump_tgt_here(desc, jump_cmd);
/* Shared path: decrypt with the DK (Decrypt Key) AAI bit set */
39 append_operation(desc, type | OP_ALG_AS_INITFINAL |
40 OP_ALG_DECRYPT | OP_ALG_AAI_DK);
41 set_jump_tgt_here(desc, uncond_jump_cmd);
45 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
46 * (non-protocol) with no (null) encryption.
47 * @desc: pointer to buffer used for descriptor construction
48 * @adata: pointer to authentication transform definitions. Note that since a
49 * split key is to be used, the size of the split key itself is
50 * specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
51 * SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
52 * @icvsize: integrity check value (ICV) size (truncated or full)
54 * Note: Requires an MDHA split key.
/* Builds the shared descriptor for authentication-only ("null" cipher)
 * AEAD encapsulation: load split MDHA key (skipped if already shared),
 * hash assoclen + cryptlen bytes, copy the data to the output via a
 * self-patched MOVE (MOVE_LEN not available on all SEC revisions), then
 * store the ICV from class 2 context. */
56 void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
59 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
61 init_sh_desc(desc, HDR_SHARE_SERIAL);
63 /* Skip if already shared */
64 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/* NOTE(review): source elided — the 'else' between the inline-key and
 * DMA-key paths is not visible. */
66 if (adata->key_inline)
67 append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
68 adata->keylen, CLASS_2 | KEY_DEST_MDHA_SPLIT |
71 append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
72 KEY_DEST_MDHA_SPLIT | KEY_ENC);
73 set_jump_tgt_here(desc, key_jump_cmd);
75 /* assoclen + cryptlen = seqinlen */
76 append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
78 /* Prepare to read and write cryptlen + assoclen bytes */
79 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
80 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
83 * MOVE_LEN opcode is not available in all SEC HW revisions,
84 * thus need to do some magic, i.e. self-patch the descriptor
/* Placeholder MOVEs whose targets are patched below via
 * set_move_tgt_here() once their final offsets are known. */
87 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
89 (0x6 << MOVE_LEN_SHIFT));
90 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
93 (0x8 << MOVE_LEN_SHIFT));
95 /* Class 2 operation */
96 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
99 /* Read and write cryptlen bytes */
100 aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
102 set_move_tgt_here(desc, read_move_cmd);
103 set_move_tgt_here(desc, write_move_cmd);
104 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
105 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
/* Store the computed ICV (icvsize bytes) from class 2 context */
109 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
110 LDST_SRCDST_BYTE_CONTEXT);
/* Debug dump of the constructed descriptor */
113 print_hex_dump(KERN_ERR,
114 "aead null enc shdesc@" __stringify(__LINE__)": ",
115 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
118 EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
121 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
122 * (non-protocol) with no (null) decryption.
123 * @desc: pointer to buffer used for descriptor construction
124 * @adata: pointer to authentication transform definitions. Note that since a
125 * split key is to be used, the size of the split key itself is
126 * specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
127 * SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
128 * @icvsize: integrity check value (ICV) size (truncated or full)
130 * Note: Requires an MDHA split key.
/* Builds the shared descriptor for authentication-only ("null" cipher)
 * AEAD decapsulation: same structure as the encap variant, but the class 2
 * operation runs with OP_ALG_ICV_ON and the descriptor ends by loading the
 * received ICV for hardware comparison instead of storing one. */
132 void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
133 unsigned int icvsize)
135 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;
137 init_sh_desc(desc, HDR_SHARE_SERIAL);
139 /* Skip if already shared */
140 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/* NOTE(review): source elided — 'else' between key paths not visible. */
142 if (adata->key_inline)
143 append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
144 adata->keylen, CLASS_2 |
145 KEY_DEST_MDHA_SPLIT | KEY_ENC);
147 append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
148 KEY_DEST_MDHA_SPLIT | KEY_ENC);
149 set_jump_tgt_here(desc, key_jump_cmd);
151 /* Class 2 operation */
152 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
153 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
155 /* assoclen + cryptlen = seqoutlen */
156 append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
158 /* Prepare to read and write cryptlen + assoclen bytes */
159 append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
160 append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
163 * MOVE_LEN opcode is not available in all SEC HW revisions,
164 * thus need to do some magic, i.e. self-patch the descriptor
/* Placeholder MOVEs, patched below once final offsets are known */
167 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
169 (0x6 << MOVE_LEN_SHIFT));
170 write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
173 (0x8 << MOVE_LEN_SHIFT));
175 /* Read and write cryptlen bytes */
176 aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
179 * Insert a NOP here, since we need at least 4 instructions between
180 * code patching the descriptor buffer and the location being patched.
/* The NOP is an unconditional jump to the immediately following command */
182 jump_cmd = append_jump(desc, JUMP_TEST_ALL);
183 set_jump_tgt_here(desc, jump_cmd);
185 set_move_tgt_here(desc, read_move_cmd);
186 set_move_tgt_here(desc, write_move_cmd);
187 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
188 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
190 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
/* Load the received ICV into class 2 for hardware ICV check */
193 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
194 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
/* Debug dump of the constructed descriptor */
197 print_hex_dump(KERN_ERR,
198 "aead null dec shdesc@" __stringify(__LINE__)": ",
199 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
202 EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
/*
 * init_sh_desc_key_aead - common shared-descriptor prologue for the AEAD
 * constructors below: init header (with context save), load the class 2
 * split authentication key and the class 1 cipher key (inline or by DMA
 * reference, both skipped when the descriptor is already shared), and for
 * rfc3686 strip the nonce from the cipher key and stage it into CONTEXT1.
 */
204 static void init_sh_desc_key_aead(u32 * const desc,
205 struct alginfo * const cdata,
206 struct alginfo * const adata,
207 const bool is_rfc3686, u32 *nonce)
210 unsigned int enckeylen = cdata->keylen;
212 /* Note: Context registers are saved. */
213 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
215 /* Skip if already shared */
216 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
221 * | key = {AUTH_KEY, ENC_KEY, NONCE}
222 * | enckeylen = encryption key size + nonce size
/* rfc3686: the trailing nonce is not part of the class 1 key proper */
225 enckeylen -= CTR_RFC3686_NONCE_SIZE;
/* NOTE(review): source elided — 'else' between key paths not visible. */
227 if (adata->key_inline)
228 append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
229 adata->keylen, CLASS_2 |
230 KEY_DEST_MDHA_SPLIT | KEY_ENC);
232 append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
233 KEY_DEST_MDHA_SPLIT | KEY_ENC);
235 if (cdata->key_inline)
236 append_key_as_imm(desc, cdata->key_virt, enckeylen,
237 enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
239 append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
242 /* Load Counter into CONTEXT1 reg */
244 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
246 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
/* Move the nonce from the output FIFO into CONTEXT1 at offset 16 */
249 MOVE_DEST_CLASS1CTX |
250 (16 << MOVE_OFFSET_SHIFT) |
251 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
254 set_jump_tgt_here(desc, key_jump_cmd);
258 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
260 * @desc: pointer to buffer used for descriptor construction
261 * @cdata: pointer to block cipher transform definitions
262 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
263 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
264 * @adata: pointer to authentication transform definitions. Note that since a
265 * split key is to be used, the size of the split key itself is
266 * specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
267 * SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
268 * @ivsize: initialization vector size
269 * @icvsize: integrity check value (ICV) size (truncated or full)
270 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
271 * @nonce: pointer to rfc3686 nonce
272 * @ctx1_iv_off: IV offset in CONTEXT1 register
273 * @is_qi: true when called from caam/qi
275 * Note: Requires an MDHA split key.
/* Builds the non-protocol IPsec ESP AEAD encapsulation shared descriptor:
 * common key prologue, class 2 auth operation, assoclen/IV handling (with
 * an extra load + CALM wait path when driven from caam/qi), skip-and-hash
 * the assoc data, optional rfc3686 counter load, class 1 cipher operation,
 * encrypt-and-hash the payload, then store the ICV. */
277 void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
278 struct alginfo *adata, unsigned int ivsize,
279 unsigned int icvsize, const bool is_rfc3686,
280 u32 *nonce, const u32 ctx1_iv_off, const bool is_qi)
282 /* Note: Context registers are saved. */
283 init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);
285 /* Class 2 operation */
286 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
292 /* REG3 = assoclen */
293 append_seq_load(desc, 4, LDST_CLASS_DECO |
294 LDST_SRCDST_WORD_DECO_MATH3 |
295 (4 << LDST_OFFSET_SHIFT));
/* Wait for the DECO load to settle before using REG3 */
297 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
298 JUMP_COND_CALM | JUMP_COND_NCP |
299 JUMP_COND_NOP | JUMP_COND_NIP |
301 set_jump_tgt_here(desc, wait_load_cmd);
/* Load the IV into CONTEXT1 at ctx1_iv_off */
303 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
304 LDST_SRCDST_BYTE_CONTEXT |
305 (ctx1_iv_off << LDST_OFFSET_SHIFT));
308 /* Read and write assoclen bytes */
309 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
310 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
312 /* Skip assoc data */
313 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
315 /* read assoc before reading payload */
316 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
319 /* Load Counter into CONTEXT1 reg */
/* rfc3686: big-endian 1 as initial block counter, after nonce + IV */
321 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
322 LDST_SRCDST_BYTE_CONTEXT |
323 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
326 /* Class 1 operation */
327 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
330 /* Read and write cryptlen bytes */
331 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
332 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
/* MSG1OUT2: payload goes to class 1 (cipher) and out to class 2 (auth) */
333 aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
/* Store the computed ICV from class 2 context */
336 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
337 LDST_SRCDST_BYTE_CONTEXT);
/* Debug dump of the constructed descriptor */
340 print_hex_dump(KERN_ERR, "aead enc shdesc@" __stringify(__LINE__)": ",
341 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
344 EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
347 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
349 * @desc: pointer to buffer used for descriptor construction
350 * @cdata: pointer to block cipher transform definitions
351 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
352 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
353 * @adata: pointer to authentication transform definitions. Note that since a
354 * split key is to be used, the size of the split key itself is
355 * specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
356 * SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
357 * @ivsize: initialization vector size
358 * @icvsize: integrity check value (ICV) size (truncated or full)
359 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
360 * @nonce: pointer to rfc3686 nonce
361 * @ctx1_iv_off: IV offset in CONTEXT1 register
362 * @is_qi: true when called from caam/qi
364 * Note: Requires an MDHA split key.
/* Builds the non-protocol IPsec ESP AEAD decapsulation shared descriptor:
 * mirror of the encap variant with OP_ALG_ICV_ON on the class 2 operation,
 * the geniv path feeding the IV into the class 2 input FIFO, either a
 * plain class 1 decrypt or the DK-aware append_dec_op1(), and a final
 * SEQ FIFO LOAD of the received ICV for hardware comparison. */
366 void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
367 struct alginfo *adata, unsigned int ivsize,
368 unsigned int icvsize, const bool geniv,
369 const bool is_rfc3686, u32 *nonce,
370 const u32 ctx1_iv_off, const bool is_qi)
372 /* Note: Context registers are saved. */
373 init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);
375 /* Class 2 operation */
376 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
377 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
382 /* REG3 = assoclen */
383 append_seq_load(desc, 4, LDST_CLASS_DECO |
384 LDST_SRCDST_WORD_DECO_MATH3 |
385 (4 << LDST_OFFSET_SHIFT));
/* Wait for the DECO load to settle before using REG3 */
387 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
388 JUMP_COND_CALM | JUMP_COND_NCP |
389 JUMP_COND_NOP | JUMP_COND_NIP |
391 set_jump_tgt_here(desc, wait_load_cmd);
/* Load the IV into CONTEXT1 at ctx1_iv_off */
394 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
395 LDST_SRCDST_BYTE_CONTEXT |
396 (ctx1_iv_off << LDST_OFFSET_SHIFT));
399 /* Read and write assoclen bytes */
400 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
/* NOTE(review): source elided — lines 402 and 404 look like the two arms
 * of a geniv conditional (output length with vs. without the IV). */
402 append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
404 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
406 /* Skip assoc data */
407 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
409 /* read assoc before reading payload */
410 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
/* geniv path: reload the IV into CONTEXT1 and also feed it to the
 * class 2 input FIFO so it is covered by the authentication */
414 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
415 LDST_SRCDST_BYTE_CONTEXT |
416 (ctx1_iv_off << LDST_OFFSET_SHIFT));
417 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
418 (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
421 /* Load Counter into CONTEXT1 reg */
/* rfc3686: big-endian 1 as initial block counter, after nonce + IV */
423 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
424 LDST_SRCDST_BYTE_CONTEXT |
425 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
428 /* Choose operation */
430 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
433 append_dec_op1(desc, cdata->algtype);
435 /* Read and write cryptlen bytes */
436 append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
437 append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
438 aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
/* Load the received ICV into class 2 for hardware ICV check */
441 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
442 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
/* Debug dump of the constructed descriptor */
445 print_hex_dump(KERN_ERR, "aead dec shdesc@" __stringify(__LINE__)": ",
446 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
449 EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
452 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
453 * (non-protocol) with HW-generated initialization
455 * @desc: pointer to buffer used for descriptor construction
456 * @cdata: pointer to block cipher transform definitions
457 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
458 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
459 * @adata: pointer to authentication transform definitions. Note that since a
460 * split key is to be used, the size of the split key itself is
461 * specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
462 * SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
463 * @ivsize: initialization vector size
464 * @icvsize: integrity check value (ICV) size (truncated or full)
465 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
466 * @nonce: pointer to rfc3686 nonce
467 * @ctx1_iv_off: IV offset in CONTEXT1 register
468 * @is_qi: true when called from caam/qi
470 * Note: Requires an MDHA split key.
/* Builds the AEAD "givencrypt" shared descriptor: like aead_encap but the
 * IV is generated in hardware (random pad NFIFO entry steered into
 * CONTEXT1), written to the output, authenticated via the class 2 FIFO,
 * and the descriptor synchronizes the ofifo->class2 IV transfer before
 * streaming the ciphertext out. */
472 void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
473 struct alginfo *adata, unsigned int ivsize,
474 unsigned int icvsize, const bool is_rfc3686,
475 u32 *nonce, const u32 ctx1_iv_off,
481 /* Note: Context registers are saved. */
482 init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);
487 /* REG3 = assoclen */
488 append_seq_load(desc, 4, LDST_CLASS_DECO |
489 LDST_SRCDST_WORD_DECO_MATH3 |
490 (4 << LDST_OFFSET_SHIFT));
/* Wait for the DECO load to settle before using REG3 */
492 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
493 JUMP_COND_CALM | JUMP_COND_NCP |
494 JUMP_COND_NOP | JUMP_COND_NIP |
496 set_jump_tgt_here(desc, wait_load_cmd);
/* Load the IV into CONTEXT1 at ctx1_iv_off */
501 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
502 LDST_SRCDST_BYTE_CONTEXT |
503 (ctx1_iv_off << LDST_OFFSET_SHIFT));
/* Build an NFIFO entry that generates ivsize random pad bytes and
 * steers them to the DECO, then move them into CONTEXT1 */
509 geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
510 NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
511 NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
512 append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
513 LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
514 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
515 append_move(desc, MOVE_WAITCOMP |
516 MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
517 (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
518 (ivsize << MOVE_LEN_SHIFT));
519 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
522 /* Copy IV to class 1 context */
523 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
524 (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
525 (ivsize << MOVE_LEN_SHIFT));
527 /* Return to encryption */
528 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
531 /* Read and write assoclen bytes */
532 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
533 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
535 /* Skip assoc data */
536 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
538 /* read assoc before reading payload */
539 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
542 /* Copy iv from outfifo to class 2 fifo */
543 moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
544 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
545 append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
546 LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
547 append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
548 LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
550 /* Load Counter into CONTEXT1 reg */
/* rfc3686: big-endian 1 as initial block counter, after nonce + IV */
552 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
553 LDST_SRCDST_BYTE_CONTEXT |
554 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
557 /* Class 1 operation */
558 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
561 /* Will write ivsize + cryptlen */
562 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
564 /* Not need to reload iv */
565 append_seq_fifo_load(desc, ivsize,
568 /* Will read cryptlen */
569 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
572 * Wait for IV transfer (ofifo -> class2) to finish before starting
573 * ciphertext transfer (ofifo -> external memory).
575 wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
576 set_jump_tgt_here(desc, wait_cmd);
578 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
579 FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
580 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
/* Store the computed ICV from class 2 context */
583 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
584 LDST_SRCDST_BYTE_CONTEXT);
/* Debug dump of the constructed descriptor */
587 print_hex_dump(KERN_ERR,
588 "aead givenc shdesc@" __stringify(__LINE__)": ",
589 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
592 EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
595 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
596 * @desc: pointer to buffer used for descriptor construction
597 * @cdata: pointer to block cipher transform definitions
598 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
599 * @icvsize: integrity check value (ICV) size (truncated or full)
/* Builds the GCM encapsulation shared descriptor. Handles the degenerate
 * cases with runtime jumps: zero assoclen + cryptlen skips straight to
 * the ICV write, zero assoclen skips the AAD read, zero cryptlen takes a
 * separate AAD-only path (FIFOLD_TYPE_LAST1 on the AAD). */
601 void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
602 unsigned int icvsize)
604 u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
605 *zero_assoc_jump_cmd2;
607 init_sh_desc(desc, HDR_SHARE_SERIAL);
609 /* skip key loading if they are loaded due to sharing */
610 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/* NOTE(review): source elided — 'else' between key paths not visible. */
612 if (cdata->key_inline)
613 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
614 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
616 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
618 set_jump_tgt_here(desc, key_jump_cmd);
620 /* class 1 operation */
621 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
624 /* if assoclen + cryptlen is ZERO, skip to ICV write */
625 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
626 zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
629 /* if assoclen is ZERO, skip reading the assoc data */
630 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
631 zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
634 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
636 /* skip assoc data */
637 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
639 /* cryptlen = seqinlen - assoclen */
640 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
642 /* if cryptlen is ZERO jump to zero-payload commands */
643 zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
646 /* read assoc data */
647 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
648 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
649 set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
651 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
653 /* write encrypted data */
654 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
656 /* read payload data */
657 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
658 FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
660 /* jump the zero-payload commands */
661 append_jump(desc, JUMP_TEST_ALL | 2);
663 /* zero-payload commands */
664 set_jump_tgt_here(desc, zero_payload_jump_cmd);
666 /* read assoc data */
667 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
668 FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
670 /* There is no input data */
671 set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
/* Store the computed ICV from class 1 context */
674 append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
675 LDST_SRCDST_BYTE_CONTEXT);
/* Debug dump of the constructed descriptor */
678 print_hex_dump(KERN_ERR, "gcm enc shdesc@" __stringify(__LINE__)": ",
679 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
682 EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
685 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
686 * @desc: pointer to buffer used for descriptor construction
687 * @cdata: pointer to block cipher transform definitions
688 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
689 * @icvsize: integrity check value (ICV) size (truncated or full)
/* Builds the GCM decapsulation shared descriptor: zero-assoclen and
 * zero-cryptlen cases handled via runtime jumps, ciphertext decrypted
 * through class 1, and the received ICV loaded last for hardware check. */
691 void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
692 unsigned int icvsize)
694 u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;
696 init_sh_desc(desc, HDR_SHARE_SERIAL);
698 /* skip key loading if they are loaded due to sharing */
699 key_jump_cmd = append_jump(desc, JUMP_JSL |
700 JUMP_TEST_ALL | JUMP_COND_SHRD);
/* NOTE(review): source elided — 'else' between key paths not visible. */
701 if (cdata->key_inline)
702 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
703 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
705 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
707 set_jump_tgt_here(desc, key_jump_cmd);
709 /* class 1 operation */
710 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
711 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
713 /* if assoclen is ZERO, skip reading the assoc data */
714 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
715 zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
718 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
720 /* skip assoc data */
721 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
723 /* read assoc data */
724 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
725 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
727 set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
729 /* cryptlen = seqoutlen - assoclen */
730 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
732 /* jump to zero-payload command if cryptlen is zero */
733 zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
736 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
738 /* store encrypted data */
739 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
741 /* read payload data */
742 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
743 FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
745 /* zero-payload command */
746 set_jump_tgt_here(desc, zero_payload_jump_cmd);
/* Load the received ICV into class 1 for hardware ICV check */
749 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
750 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
/* Debug dump of the constructed descriptor */
753 print_hex_dump(KERN_ERR, "gcm dec shdesc@" __stringify(__LINE__)": ",
754 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
757 EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
760 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
762 * @desc: pointer to buffer used for descriptor construction
763 * @cdata: pointer to block cipher transform definitions
764 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
765 * @icvsize: integrity check value (ICV) size (truncated or full)
/* Builds the rfc4106 (IPsec ESP GCM) encapsulation shared descriptor.
 * The AAD read length is assoclen - 8 (the trailing 8-byte IV portion is
 * skipped separately), and a dummy class 1 FIFO load works around erratum
 * A-005473 before the second SEQ FIFO skip. */
767 void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
768 unsigned int icvsize)
772 init_sh_desc(desc, HDR_SHARE_SERIAL);
774 /* Skip key loading if it is loaded due to sharing */
775 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/* NOTE(review): source elided — 'else' between key paths not visible. */
777 if (cdata->key_inline)
778 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
779 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
781 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
783 set_jump_tgt_here(desc, key_jump_cmd);
785 /* Class 1 operation */
786 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
/* VSIL = assoclen - 8 (exclude the trailing IV bytes from the AAD) */
789 append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
790 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
792 /* Read assoc data */
793 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
794 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
/* Skip the 8 IV bytes in the input sequence */
797 append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
799 /* Will read cryptlen bytes */
800 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
802 /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
803 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
805 /* Skip assoc data */
806 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
808 /* cryptlen = seqoutlen - assoclen */
809 append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);
811 /* Write encrypted data */
812 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
814 /* Read payload data */
815 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
816 FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
/* Store the computed ICV from class 1 context */
819 append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
820 LDST_SRCDST_BYTE_CONTEXT);
/* Debug dump of the constructed descriptor */
823 print_hex_dump(KERN_ERR,
824 "rfc4106 enc shdesc@" __stringify(__LINE__)": ",
825 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
828 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
831 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
833 * @desc: pointer to buffer used for descriptor construction
834 * @cdata: pointer to block cipher transform definitions
835 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
836 * @icvsize: integrity check value (ICV) size (truncated or full)
/* Builds the rfc4106 (IPsec ESP GCM) decapsulation shared descriptor:
 * mirror of the encap variant (AAD = assoclen - 8, IV skip, A-005473
 * workaround), decrypting through class 1 and finishing with a SEQ FIFO
 * LOAD of the received ICV for hardware comparison. */
838 void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
839 unsigned int icvsize)
843 init_sh_desc(desc, HDR_SHARE_SERIAL);
845 /* Skip key loading if it is loaded due to sharing */
846 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/* NOTE(review): source elided — 'else' between key paths not visible. */
848 if (cdata->key_inline)
849 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
850 cdata->keylen, CLASS_1 |
853 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
855 set_jump_tgt_here(desc, key_jump_cmd);
857 /* Class 1 operation */
858 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
859 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
/* VSIL = assoclen - 8 (exclude the trailing IV bytes from the AAD) */
861 append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
862 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
864 /* Read assoc data */
865 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
866 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
/* Skip the 8 IV bytes in the input sequence */
869 append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
871 /* Will read cryptlen bytes */
872 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
874 /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
875 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
877 /* Skip assoc data */
878 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
880 /* Will write cryptlen bytes */
881 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
883 /* Store payload data */
884 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
886 /* Read encrypted data */
887 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
888 FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
/* Load the received ICV into class 1 for hardware ICV check */
891 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
892 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
/* Debug dump of the constructed descriptor */
895 print_hex_dump(KERN_ERR,
896 "rfc4106 dec shdesc@" __stringify(__LINE__)": ",
897 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
900 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
903 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
905 * @desc: pointer to buffer used for descriptor construction
906 * @cdata: pointer to block cipher transform definitions
907 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
908 * @icvsize: integrity check value (ICV) size (truncated or full)
/* Builds the rfc4543 (GMAC) encapsulation shared descriptor: all input
 * (assoclen + cryptlen) is fed as AAD to the class 1 GCM engine, and the
 * payload is duplicated to the output FIFO via self-patched MOVE commands
 * (MOVE_LEN unavailable on some SEC revisions); ICV stored at the end. */
910 void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
911 unsigned int icvsize)
913 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
915 init_sh_desc(desc, HDR_SHARE_SERIAL);
917 /* Skip key loading if it is loaded due to sharing */
918 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/* NOTE(review): source elided — 'else' between key paths not visible. */
920 if (cdata->key_inline)
921 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
922 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
924 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
926 set_jump_tgt_here(desc, key_jump_cmd);
928 /* Class 1 operation */
929 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
932 /* assoclen + cryptlen = seqinlen */
933 append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
936 * MOVE_LEN opcode is not available in all SEC HW revisions,
937 * thus need to do some magic, i.e. self-patch the descriptor
/* Placeholder MOVEs, patched below once final offsets are known */
940 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
941 (0x6 << MOVE_LEN_SHIFT));
942 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
943 (0x8 << MOVE_LEN_SHIFT));
945 /* Will read assoclen + cryptlen bytes */
946 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
948 /* Will write assoclen + cryptlen bytes */
949 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
951 /* Read and write assoclen + cryptlen bytes */
952 aead_append_src_dst(desc, FIFOLD_TYPE_AAD);
954 set_move_tgt_here(desc, read_move_cmd);
955 set_move_tgt_here(desc, write_move_cmd);
956 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
957 /* Move payload data to OFIFO */
958 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
/* Store the computed ICV from class 1 context */
961 append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
962 LDST_SRCDST_BYTE_CONTEXT);
/* Debug dump of the constructed descriptor */
965 print_hex_dump(KERN_ERR,
966 "rfc4543 enc shdesc@" __stringify(__LINE__)": ",
967 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
970 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
973 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
975 * @desc: pointer to buffer used for descriptor construction
976 * @cdata: pointer to block cipher transform definitions
977 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
978 * @icvsize: integrity check value (ICV) size (truncated or full)
/* Builds the rfc4543 (GMAC) decapsulation shared descriptor: in-snoops
 * assoclen + cryptlen as AAD into both classes, moves the payload copy to
 * the output FIFO via self-patched MOVEs, then loads the received ICV for
 * hardware comparison. */
980 void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
981 unsigned int icvsize)
983 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
985 init_sh_desc(desc, HDR_SHARE_SERIAL);
987 /* Skip key loading if it is loaded due to sharing */
988 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/* NOTE(review): source elided — 'else' between key paths not visible. */
990 if (cdata->key_inline)
991 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
992 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
994 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
996 set_jump_tgt_here(desc, key_jump_cmd);
998 /* Class 1 operation */
999 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1000 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1002 /* assoclen + cryptlen = seqoutlen */
1003 append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1006 * MOVE_LEN opcode is not available in all SEC HW revisions,
1007 * thus need to do some magic, i.e. self-patch the descriptor
/* Placeholder MOVEs, patched below once final offsets are known */
1010 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1011 (0x6 << MOVE_LEN_SHIFT));
1012 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1013 (0x8 << MOVE_LEN_SHIFT));
1015 /* Will read assoclen + cryptlen bytes */
1016 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1018 /* Will write assoclen + cryptlen bytes */
1019 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1021 /* Store payload data */
1022 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1024 /* In-snoop assoclen + cryptlen data */
1025 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
1026 FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);
1028 set_move_tgt_here(desc, read_move_cmd);
1029 set_move_tgt_here(desc, write_move_cmd);
1030 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1031 /* Move payload data to OFIFO */
1032 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1033 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
/* Load the received ICV into class 1 for hardware ICV check */
1036 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1037 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
/* Debug dump of the constructed descriptor */
1040 print_hex_dump(KERN_ERR,
1041 "rfc4543 dec shdesc@" __stringify(__LINE__)": ",
1042 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1045 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
1048 * For ablkcipher encrypt and decrypt, read from req->src and
/*
 * Common tail for the ablkcipher descriptors: set the variable sequence
 * in/out lengths from SEQINLEN, then read the payload into class 1 and
 * store the processed result, both via variable-length (VLF) FIFO ops.
 */
1051 static inline void ablkcipher_append_src_dst(u32 *desc)
1053 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1054 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1055 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
1056 KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
1057 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
1061 * cnstr_shdsc_ablkcipher_encap - ablkcipher encapsulation shared descriptor
1062 * @desc: pointer to buffer used for descriptor construction
1063 * @cdata: pointer to block cipher transform definitions
1064 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
1065 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
1066 * @ivsize: initialization vector size
1067 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1068 * @ctx1_iv_off: IV offset in CONTEXT1 register
/*
 * Builds the ablkcipher encryption shared descriptor: load the class 1
 * key, optionally (rfc3686) load the nonce and a counter into CONTEXT1,
 * load the IV, run the encrypt operation, then process the payload.
 * NOTE(review): numbered extract; the "if (is_rfc3686)" guards around the
 * nonce/counter loads and several continuation lines are not visible here
 * -- confirm against the full source before relying on this listing.
 */
1070 void cnstr_shdsc_ablkcipher_encap(u32 * const desc, struct alginfo *cdata,
1071 unsigned int ivsize, const bool is_rfc3686,
1072 const u32 ctx1_iv_off)
1076 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1077 /* Skip if already shared */
1078 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1081 /* Load class1 key only */
1082 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1083 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1085 /* Load nonce into CONTEXT1 reg */
/* Nonce is stored right after the key material. */
1087 u8 *nonce = cdata->key_virt + cdata->keylen;
/* Stage the nonce through the output FIFO, then move it into CONTEXT1. */
1089 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1090 LDST_CLASS_IND_CCB |
1091 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1092 append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1093 MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1094 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1097 set_jump_tgt_here(desc, key_jump_cmd);
/* Load the IV into CONTEXT1 at the caller-provided offset. */
1100 append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1101 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1103 /* Load counter into CONTEXT1 reg */
/* Initial block counter value of 1, big-endian, placed after the IV. */
1105 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1106 LDST_SRCDST_BYTE_CONTEXT |
1107 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1108 LDST_OFFSET_SHIFT));
1110 /* Load operation */
1111 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1114 /* Perform operation */
1115 ablkcipher_append_src_dst(desc);
/* Debug dump of the constructed descriptor. */
1118 print_hex_dump(KERN_ERR,
1119 "ablkcipher enc shdesc@" __stringify(__LINE__)": ",
1120 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1123 EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_encap);
1126 * cnstr_shdsc_ablkcipher_decap - ablkcipher decapsulation shared descriptor
1127 * @desc: pointer to buffer used for descriptor construction
1128 * @cdata: pointer to block cipher transform definitions
1129 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
1130 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
1131 * @ivsize: initialization vector size
1132 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1133 * @ctx1_iv_off: IV offset in CONTEXT1 register
/*
 * Builds the ablkcipher decryption shared descriptor. Mirrors the encap
 * variant, except the operation is chosen at the end: append_dec_op1()
 * handles the AES DK (decrypt-key) quirk when the descriptor is shared.
 * NOTE(review): numbered extract; "if (is_rfc3686)" guards, the branch
 * selecting between append_operation() and append_dec_op1(), and several
 * continuation lines are not visible here -- verify against full source.
 */
1135 void cnstr_shdsc_ablkcipher_decap(u32 * const desc, struct alginfo *cdata,
1136 unsigned int ivsize, const bool is_rfc3686,
1137 const u32 ctx1_iv_off)
1141 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1142 /* Skip if already shared */
1143 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1146 /* Load class1 key only */
1147 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1148 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1150 /* Load nonce into CONTEXT1 reg */
/* Nonce is stored right after the key material. */
1152 u8 *nonce = cdata->key_virt + cdata->keylen;
/* Stage the nonce through the output FIFO, then move it into CONTEXT1. */
1154 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1155 LDST_CLASS_IND_CCB |
1156 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1157 append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1158 MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1159 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1162 set_jump_tgt_here(desc, key_jump_cmd);
/* Load the IV into CONTEXT1 at the caller-provided offset. */
1165 append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1166 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1168 /* Load counter into CONTEXT1 reg */
/* Initial block counter value of 1, big-endian, placed after the IV. */
1170 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1171 LDST_SRCDST_BYTE_CONTEXT |
1172 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1173 LDST_OFFSET_SHIFT));
1175 /* Choose operation */
1177 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
/* append_dec_op1() sets the DK bit for AES when sharing is active. */
1180 append_dec_op1(desc, cdata->algtype);
1182 /* Perform operation */
1183 ablkcipher_append_src_dst(desc);
/* Debug dump of the constructed descriptor. */
1186 print_hex_dump(KERN_ERR,
1187 "ablkcipher dec shdesc@" __stringify(__LINE__)": ",
1188 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1191 EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_decap);
1194 * cnstr_shdsc_ablkcipher_givencap - ablkcipher encapsulation shared descriptor
1195 * with HW-generated initialization vector.
1196 * @desc: pointer to buffer used for descriptor construction
1197 * @cdata: pointer to block cipher transform definitions
1198 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
1199 * with OP_ALG_AAI_CBC.
1200 * @ivsize: initialization vector size
1201 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1202 * @ctx1_iv_off: IV offset in CONTEXT1 register
/*
 * Builds the ablkcipher "givencrypt" shared descriptor: like encap, but
 * the IV is generated by the hardware RNG (NFIFOENTRY_PTYPE_RND pad entry
 * routed into CONTEXT1) and written back to the output sequence so the
 * caller can retrieve it.
 * NOTE(review): numbered extract; "if (is_rfc3686)" guards and several
 * continuation lines are not visible here -- verify against full source.
 */
1204 void cnstr_shdsc_ablkcipher_givencap(u32 * const desc, struct alginfo *cdata,
1205 unsigned int ivsize, const bool is_rfc3686,
1206 const u32 ctx1_iv_off)
/* geniv holds the hand-built info-FIFO entry requesting random pad data. */
1208 u32 *key_jump_cmd, geniv;
1210 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1211 /* Skip if already shared */
1212 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1215 /* Load class1 key only */
1216 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1217 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1219 /* Load Nonce into CONTEXT1 reg */
/* Nonce is stored right after the key material. */
1221 u8 *nonce = cdata->key_virt + cdata->keylen;
/* Stage the nonce through the output FIFO, then move it into CONTEXT1. */
1223 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1224 LDST_CLASS_IND_CCB |
1225 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1226 append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1227 MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1228 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1230 set_jump_tgt_here(desc, key_jump_cmd);
/* Request ivsize bytes of RNG pad data delivered to the DECO. */
1233 geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
1234 NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 | NFIFOENTRY_PTYPE_RND |
1235 (ivsize << NFIFOENTRY_DLEN_SHIFT);
1236 append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
1237 LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
/* Auto info-FIFO is disabled while manually moving the generated IV. */
1238 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1239 append_move(desc, MOVE_WAITCOMP | MOVE_SRC_INFIFO |
1240 MOVE_DEST_CLASS1CTX | (ivsize << MOVE_LEN_SHIFT) |
1241 (ctx1_iv_off << MOVE_OFFSET_SHIFT));
1242 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1244 /* Copy generated IV to memory */
1245 append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1246 LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1248 /* Load Counter into CONTEXT1 reg */
/* Initial block counter value of 1, big-endian, placed after the IV. */
1250 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1251 LDST_SRCDST_BYTE_CONTEXT |
1252 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1253 LDST_OFFSET_SHIFT));
/* Wait (JUMP_COND_NCP) for the IV transfer to complete before the
 * cipher operation starts -- presumably guarded in the full source. */
1256 append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
1257 (1 << JUMP_OFFSET_SHIFT));
1259 /* Load operation */
1260 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1263 /* Perform operation */
1264 ablkcipher_append_src_dst(desc);
/* Debug dump of the constructed descriptor. */
1267 print_hex_dump(KERN_ERR,
1268 "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
1269 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1272 EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_givencap);
1275 * cnstr_shdsc_xts_ablkcipher_encap - xts ablkcipher encapsulation shared
1277 * @desc: pointer to buffer used for descriptor construction
1278 * @cdata: pointer to block cipher transform definitions
1279 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
/*
 * Builds the XTS-AES encryption shared descriptor: load the class 1 keys,
 * write a large sector size into CONTEXT1 (offset 0x28) to disable HW
 * sector segmentation, load the upper 8 bytes of the IV as the sector
 * index (lower 8 bytes skipped), then encrypt the payload.
 * FIX: "(void *)§or_size" was an HTML-entity corruption of
 * "(void *)&sector_size" ("&sect;" rendered as "§"); restored the
 * address-of operator so the sector size immediate is loaded correctly.
 * NOTE(review): numbered extract; continuation lines (JUMP_COND_SHRD,
 * OP_ALG_ENCRYPT, braces) are not visible -- verify against full source.
 */
1281 void cnstr_shdsc_xts_ablkcipher_encap(u32 * const desc, struct alginfo *cdata)
1284 * Set sector size to a big value, practically disabling
1285 * sector size segmentation in xts implementation. We cannot
1286 * take full advantage of this HW feature with existing
1287 * crypto API / dm-crypt SW architecture.
1289 __be64 sector_size = cpu_to_be64(BIT(15));
1292 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1293 /* Skip if already shared */
1294 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1297 /* Load class1 keys only */
1298 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1299 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1301 /* Load sector size with index 40 bytes (0x28) */
1302 append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
1303 LDST_SRCDST_BYTE_CONTEXT |
1304 (0x28 << LDST_OFFSET_SHIFT));
1306 set_jump_tgt_here(desc, key_jump_cmd);
1309 * create sequence for loading the sector index
1310 * Upper 8B of IV - will be used as sector index
1311 * Lower 8B of IV - will be discarded
1313 append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1314 (0x20 << LDST_OFFSET_SHIFT));
1315 append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1317 /* Load operation */
1318 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1321 /* Perform operation */
1322 ablkcipher_append_src_dst(desc);
/* Debug dump of the constructed descriptor. */
1325 print_hex_dump(KERN_ERR,
1326 "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
1327 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1330 EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_encap);
1333 * cnstr_shdsc_xts_ablkcipher_decap - xts ablkcipher decapsulation shared
1335 * @desc: pointer to buffer used for descriptor construction
1336 * @cdata: pointer to block cipher transform definitions
1337 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
/*
 * Builds the XTS-AES decryption shared descriptor. Mirrors the encap
 * variant, but uses append_dec_op1() so the AES DK (decrypt-key) bit is
 * set when the descriptor runs shared.
 * FIX: "(void *)§or_size" was an HTML-entity corruption of
 * "(void *)&sector_size" ("&sect;" rendered as "§"); restored the
 * address-of operator so the sector size immediate is loaded correctly.
 * NOTE(review): numbered extract; continuation lines (JUMP_COND_SHRD,
 * braces) are not visible -- verify against the full source file.
 */
1339 void cnstr_shdsc_xts_ablkcipher_decap(u32 * const desc, struct alginfo *cdata)
1342 * Set sector size to a big value, practically disabling
1343 * sector size segmentation in xts implementation. We cannot
1344 * take full advantage of this HW feature with existing
1345 * crypto API / dm-crypt SW architecture.
1347 __be64 sector_size = cpu_to_be64(BIT(15));
1350 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1351 /* Skip if already shared */
1352 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1355 /* Load class1 key only */
1356 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1357 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1359 /* Load sector size with index 40 bytes (0x28) */
1360 append_load_as_imm(desc, (void *)&sector_size, 8, LDST_CLASS_1_CCB |
1361 LDST_SRCDST_BYTE_CONTEXT |
1362 (0x28 << LDST_OFFSET_SHIFT));
1364 set_jump_tgt_here(desc, key_jump_cmd);
1367 * create sequence for loading the sector index
1368 * Upper 8B of IV - will be used as sector index
1369 * Lower 8B of IV - will be discarded
1371 append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1372 (0x20 << LDST_OFFSET_SHIFT));
1373 append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1375 /* Load operation */
/* append_dec_op1() sets the DK bit for AES when sharing is active. */
1376 append_dec_op1(desc, cdata->algtype);
1378 /* Perform operation */
1379 ablkcipher_append_src_dst(desc);
/* Debug dump of the constructed descriptor. */
1382 print_hex_dump(KERN_ERR,
1383 "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
1384 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1387 EXPORT_SYMBOL(cnstr_shdsc_xts_ablkcipher_decap);
/* Standard kernel module metadata. */
1389 MODULE_LICENSE("GPL");
1390 MODULE_DESCRIPTION("FSL CAAM descriptor support");
1391 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");