1 // SPDX-License-Identifier: GPL-2.0+
3 * Shared descriptors for aead, skcipher algorithms
5 * Copyright 2016-2019 NXP
9 #include "desc_constr.h"
10 #include "caamalg_desc.h"
13 * For aead functions, read payload and write payload,
14 * both of which are specified in req->src and req->dst
/*
 * Appends a variable-length (VLF) SEQ FIFO STORE to write the payload to
 * req->dst, then a SEQ FIFO LOAD feeding both CHA classes
 * (FIFOLD_CLASS_BOTH) from req->src, terminated with LASTBOTH.
 * @msg_type: FIFO-load message-type bits OR'ed into the load command.
 */
16 static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
18 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
19 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
20 KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
23 /* Set DK bit in class 1 operation if shared */
/*
 * Appends the class 1 decrypt OPERATION command. For AES the descriptor
 * tests the SHRD condition at run time and selects between two decrypt
 * operations via self-referential jumps (the second one is reached only
 * when the descriptor is shared).
 * NOTE(review): several continuation lines (including the flag added to
 * the second append_operation) are not visible in this extract.
 */
24 static inline void append_dec_op1(u32 *desc, u32 type)
26 u32 *jump_cmd, *uncond_jump_cmd;
28 /* DK bit is valid only for AES */
29 if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
30 append_operation(desc, type | OP_ALG_AS_INITFINAL |
/* Jump taken when the SHRD flag is set; otherwise fall through */
35 jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
36 append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT);
/* Skip over the shared-case operation below */
37 uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
38 set_jump_tgt_here(desc, jump_cmd);
39 append_operation(desc, type | OP_ALG_AS_INIT | OP_ALG_DECRYPT |
41 set_jump_tgt_here(desc, uncond_jump_cmd);
45 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
46 * (non-protocol) with no (null) encryption.
47 * @desc: pointer to buffer used for descriptor construction
48 * @adata: pointer to authentication transform definitions.
49 * A split key is required for SEC Era < 6; the size of the split key
50 * is specified in this case. Valid algorithm values - one of
51 * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
52 * with OP_ALG_AAI_HMAC_PRECOMP.
53 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
56 void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
57 unsigned int icvsize, int era)
59 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
61 init_sh_desc(desc, HDR_SHARE_SERIAL);
63 /* Skip if already shared */
64 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/*
 * Authentication key: MDHA split key (inline or by DMA reference) vs.
 * DKP protocol. NOTE(review): the era-dependent if/else lines around
 * these three alternatives are not visible in this extract.
 */
67 if (adata->key_inline)
68 append_key_as_imm(desc, adata->key_virt,
69 adata->keylen_pad, adata->keylen,
70 CLASS_2 | KEY_DEST_MDHA_SPLIT |
73 append_key(desc, adata->key_dma, adata->keylen,
74 CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
76 append_proto_dkp(desc, adata);
78 set_jump_tgt_here(desc, key_jump_cmd);
80 /* assoclen + cryptlen = seqinlen */
81 append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
83 /* Prepare to read and write cryptlen + assoclen bytes */
84 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
85 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
88 * MOVE_LEN opcode is not available in all SEC HW revisions,
89 * thus need to do some magic, i.e. self-patch the descriptor
92 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
94 (0x6 << MOVE_LEN_SHIFT));
95 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
98 (0x8 << MOVE_LEN_SHIFT));
100 /* Class 2 operation */
101 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
104 /* Read and write cryptlen bytes */
105 aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
/* Patch the two MOVE commands recorded above to point here */
107 set_move_tgt_here(desc, read_move_cmd);
108 set_move_tgt_here(desc, write_move_cmd);
109 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
110 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
/* Write ICV */
114 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
115 LDST_SRCDST_BYTE_CONTEXT);
117 print_hex_dump_debug("aead null enc shdesc@" __stringify(__LINE__)": ",
118 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
121 EXPORT_SYMBOL(cnstr_shdsc_aead_null_encap);
124 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
125 * (non-protocol) with no (null) decryption.
126 * @desc: pointer to buffer used for descriptor construction
127 * @adata: pointer to authentication transform definitions.
128 * A split key is required for SEC Era < 6; the size of the split key
129 * is specified in this case. Valid algorithm values - one of
130 * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
131 * with OP_ALG_AAI_HMAC_PRECOMP.
132 * @icvsize: integrity check value (ICV) size (truncated or full)
 * @era: SEC Era
135 void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
136 unsigned int icvsize, int era)
138 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;
140 init_sh_desc(desc, HDR_SHARE_SERIAL);
142 /* Skip if already shared */
143 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/*
 * Authentication key: MDHA split key (inline or by DMA reference) vs.
 * DKP protocol. NOTE(review): the era-dependent if/else lines around
 * these three alternatives are not visible in this extract.
 */
146 if (adata->key_inline)
147 append_key_as_imm(desc, adata->key_virt,
148 adata->keylen_pad, adata->keylen,
149 CLASS_2 | KEY_DEST_MDHA_SPLIT |
152 append_key(desc, adata->key_dma, adata->keylen,
153 CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
155 append_proto_dkp(desc, adata);
157 set_jump_tgt_here(desc, key_jump_cmd);
159 /* Class 2 operation */
160 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
161 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
163 /* assoclen + cryptlen = seqoutlen */
164 append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
166 /* Prepare to read and write cryptlen + assoclen bytes */
167 append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
168 append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
171 * MOVE_LEN opcode is not available in all SEC HW revisions,
172 * thus need to do some magic, i.e. self-patch the descriptor
175 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
177 (0x6 << MOVE_LEN_SHIFT));
178 write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
181 (0x8 << MOVE_LEN_SHIFT));
183 /* Read and write cryptlen bytes */
184 aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
187 * Insert a NOP here, since we need at least 4 instructions between
188 * code patching the descriptor buffer and the location being patched.
190 jump_cmd = append_jump(desc, JUMP_TEST_ALL);
191 set_jump_tgt_here(desc, jump_cmd);
/* Patch the two MOVE commands recorded above to point here */
193 set_move_tgt_here(desc, read_move_cmd);
194 set_move_tgt_here(desc, write_move_cmd);
195 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
196 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
198 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
/* Load ICV for class 2 verification (ICV_ON set above) */
201 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
202 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
204 print_hex_dump_debug("aead null dec shdesc@" __stringify(__LINE__)": ",
205 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
208 EXPORT_SYMBOL(cnstr_shdsc_aead_null_decap);
/*
 * Shared-descriptor prologue common to the aead (non-null) constructors:
 * initializes the header with context saving, loads the class 2 split
 * key / DKP and the class 1 cipher key under a SHRD-skip jump, and, for
 * rfc3686, strips the nonce from the cipher key and loads it into the
 * CONTEXT1 register.
 * NOTE(review): several control-flow lines (era/is_rfc3686 if/else,
 * braces) are not visible in this extract.
 */
210 static void init_sh_desc_key_aead(u32 * const desc,
211 struct alginfo * const cdata,
212 struct alginfo * const adata,
213 const bool is_rfc3686, u32 *nonce, int era)
216 unsigned int enckeylen = cdata->keylen;
218 /* Note: Context registers are saved. */
219 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
221 /* Skip if already shared */
222 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
227 * | key = {AUTH_KEY, ENC_KEY, NONCE}
228 * | enckeylen = encryption key size + nonce size
/* rfc3686: the trailing nonce is not part of the cipher key proper */
231 enckeylen -= CTR_RFC3686_NONCE_SIZE;
/* Authentication key: split key (inline/reference) or DKP */
234 if (adata->key_inline)
235 append_key_as_imm(desc, adata->key_virt,
236 adata->keylen_pad, adata->keylen,
237 CLASS_2 | KEY_DEST_MDHA_SPLIT |
240 append_key(desc, adata->key_dma, adata->keylen,
241 CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
243 append_proto_dkp(desc, adata);
/* Cipher key (nonce excluded) into the class 1 key register */
246 if (cdata->key_inline)
247 append_key_as_imm(desc, cdata->key_virt, enckeylen,
248 enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
250 append_key(desc, cdata->key_dma, enckeylen, CLASS_1 |
253 /* Load Counter into CONTEXT1 reg */
255 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
257 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
260 MOVE_DEST_CLASS1CTX |
261 (16 << MOVE_OFFSET_SHIFT) |
262 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
265 set_jump_tgt_here(desc, key_jump_cmd);
269 * cnstr_shdsc_aead_encap - IPSec ESP encapsulation shared descriptor
271 * @desc: pointer to buffer used for descriptor construction
272 * @cdata: pointer to block cipher transform definitions
273 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
274 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
275 * @adata: pointer to authentication transform definitions.
276 * A split key is required for SEC Era < 6; the size of the split key
277 * is specified in this case. Valid algorithm values - one of
278 * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
279 * with OP_ALG_AAI_HMAC_PRECOMP.
280 * @ivsize: initialization vector size
281 * @icvsize: integrity check value (ICV) size (truncated or full)
282 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
283 * @nonce: pointer to rfc3686 nonce
284 * @ctx1_iv_off: IV offset in CONTEXT1 register
285 * @is_qi: true when called from caam/qi
 * @era: SEC Era
288 void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
289 struct alginfo *adata, unsigned int ivsize,
290 unsigned int icvsize, const bool is_rfc3686,
291 u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
294 /* Note: Context registers are saved. */
295 init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
297 /* Class 2 operation */
298 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
/* QI-only section: assoclen arrives in-band — see is_qi guard (not visible here) */
304 /* REG3 = assoclen */
305 append_seq_load(desc, 4, LDST_CLASS_DECO |
306 LDST_SRCDST_WORD_DECO_MATH3 |
307 (4 << LDST_OFFSET_SHIFT));
/* Wait until the DECO load above has completed */
309 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
310 JUMP_COND_CALM | JUMP_COND_NCP |
311 JUMP_COND_NOP | JUMP_COND_NIP |
313 set_jump_tgt_here(desc, wait_load_cmd);
315 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
316 LDST_SRCDST_BYTE_CONTEXT |
317 (ctx1_iv_off << LDST_OFFSET_SHIFT));
320 /* Read and write assoclen bytes */
321 if (is_qi || era < 3) {
322 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
323 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
/* Era >= 3 job ring: assoclen is taken from DPOVRD instead of REG3 */
325 append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
326 append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
329 /* Skip assoc data */
330 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
332 /* read assoc before reading payload */
333 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
336 /* Load Counter into CONTEXT1 reg */
338 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
339 LDST_SRCDST_BYTE_CONTEXT |
340 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
343 /* Class 1 operation */
344 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
347 /* Read and write cryptlen bytes */
348 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
349 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
350 aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
/* Write ICV */
353 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
354 LDST_SRCDST_BYTE_CONTEXT);
356 print_hex_dump_debug("aead enc shdesc@" __stringify(__LINE__)": ",
357 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
360 EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
363 * cnstr_shdsc_aead_decap - IPSec ESP decapsulation shared descriptor
365 * @desc: pointer to buffer used for descriptor construction
366 * @cdata: pointer to block cipher transform definitions
367 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
368 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
369 * @adata: pointer to authentication transform definitions.
370 * A split key is required for SEC Era < 6; the size of the split key
371 * is specified in this case. Valid algorithm values - one of
372 * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
373 * with OP_ALG_AAI_HMAC_PRECOMP.
374 * @ivsize: initialization vector size
375 * @icvsize: integrity check value (ICV) size (truncated or full)
376 * @geniv: whether to generate Encrypted Chain IV
377 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
378 * @nonce: pointer to rfc3686 nonce
379 * @ctx1_iv_off: IV offset in CONTEXT1 register
380 * @is_qi: true when called from caam/qi
 * @era: SEC Era
383 void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
384 struct alginfo *adata, unsigned int ivsize,
385 unsigned int icvsize, const bool geniv,
386 const bool is_rfc3686, u32 *nonce,
387 const u32 ctx1_iv_off, const bool is_qi, int era)
389 /* Note: Context registers are saved. */
390 init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
392 /* Class 2 operation */
393 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
394 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
/* QI-only section — see is_qi guard (not visible in this extract) */
399 /* REG3 = assoclen */
400 append_seq_load(desc, 4, LDST_CLASS_DECO |
401 LDST_SRCDST_WORD_DECO_MATH3 |
402 (4 << LDST_OFFSET_SHIFT));
/* Wait until the DECO load above has completed */
404 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
405 JUMP_COND_CALM | JUMP_COND_NCP |
406 JUMP_COND_NOP | JUMP_COND_NIP |
408 set_jump_tgt_here(desc, wait_load_cmd);
411 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
412 LDST_SRCDST_BYTE_CONTEXT |
413 (ctx1_iv_off << LDST_OFFSET_SHIFT));
416 /* Read and write assoclen bytes */
417 if (is_qi || era < 3) {
418 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
/* geniv branch adds ivsize to the output length (guard not visible) */
420 append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
423 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
/* Era >= 3 job ring: assoclen is taken from DPOVRD instead of REG3 */
426 append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
428 append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
431 append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
435 /* Skip assoc data */
436 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
438 /* read assoc before reading payload */
439 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
/* geniv path: load IV and feed it into the class 2 input FIFO too */
443 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
444 LDST_SRCDST_BYTE_CONTEXT |
445 (ctx1_iv_off << LDST_OFFSET_SHIFT));
446 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
447 (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
450 /* Load Counter into CONTEXT1 reg */
452 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
453 LDST_SRCDST_BYTE_CONTEXT |
454 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
457 /* Choose operation */
459 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
462 append_dec_op1(desc, cdata->algtype);
464 /* Read and write cryptlen bytes */
465 append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
466 append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
467 aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
/* Load ICV for class 2 verification (ICV_ON set above) */
470 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
471 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
473 print_hex_dump_debug("aead dec shdesc@" __stringify(__LINE__)": ",
474 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
477 EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
480 * cnstr_shdsc_aead_givencap - IPSec ESP encapsulation shared descriptor
481 * (non-protocol) with HW-generated initialization
483 * @desc: pointer to buffer used for descriptor construction
484 * @cdata: pointer to block cipher transform definitions
485 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
486 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
487 * @adata: pointer to authentication transform definitions.
488 * A split key is required for SEC Era < 6; the size of the split key
489 * is specified in this case. Valid algorithm values - one of
490 * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
491 * with OP_ALG_AAI_HMAC_PRECOMP.
492 * @ivsize: initialization vector size
493 * @icvsize: integrity check value (ICV) size (truncated or full)
494 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
495 * @nonce: pointer to rfc3686 nonce
496 * @ctx1_iv_off: IV offset in CONTEXT1 register
497 * @is_qi: true when called from caam/qi
 * @era: SEC Era
500 void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
501 struct alginfo *adata, unsigned int ivsize,
502 unsigned int icvsize, const bool is_rfc3686,
503 u32 *nonce, const u32 ctx1_iv_off,
504 const bool is_qi, int era)
509 /* Note: Context registers are saved. */
510 init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
/* QI-only section — see is_qi guard (not visible in this extract) */
515 /* REG3 = assoclen */
516 append_seq_load(desc, 4, LDST_CLASS_DECO |
517 LDST_SRCDST_WORD_DECO_MATH3 |
518 (4 << LDST_OFFSET_SHIFT));
/* Wait until the DECO load above has completed */
520 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
521 JUMP_COND_CALM | JUMP_COND_NCP |
522 JUMP_COND_NOP | JUMP_COND_NIP |
524 set_jump_tgt_here(desc, wait_load_cmd);
529 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
530 LDST_SRCDST_BYTE_CONTEXT |
531 (ctx1_iv_off << LDST_OFFSET_SHIFT));
/*
 * HW-generated IV: build an info-FIFO entry that pads ivsize random
 * bytes into the DECO, then move them into the class 1 context.
 */
537 geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
538 NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
539 NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
540 append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
541 LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
542 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
543 append_move(desc, MOVE_WAITCOMP |
544 MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
545 (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
546 (ivsize << MOVE_LEN_SHIFT));
547 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
550 /* Copy IV to class 1 context */
551 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
552 (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
553 (ivsize << MOVE_LEN_SHIFT));
555 /* Return to encryption */
556 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
559 /* Read and write assoclen bytes */
560 if (is_qi || era < 3) {
561 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
562 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
/* Era >= 3 job ring: assoclen is taken from DPOVRD instead of REG3 */
564 append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
565 append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
568 /* Skip assoc data */
569 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
571 /* read assoc before reading payload */
572 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
575 /* Copy iv from outfifo to class 2 fifo */
576 moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
577 NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
578 append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
579 LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
580 append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
581 LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
583 /* Load Counter into CONTEXT1 reg */
585 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
586 LDST_SRCDST_BYTE_CONTEXT |
587 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
590 /* Class 1 operation */
591 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
594 /* Will write ivsize + cryptlen */
595 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
597 /* Not need to reload iv */
598 append_seq_fifo_load(desc, ivsize,
601 /* Will read cryptlen */
602 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
605 * Wait for IV transfer (ofifo -> class2) to finish before starting
606 * ciphertext transfer (ofifo -> external memory).
608 wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
609 set_jump_tgt_here(desc, wait_cmd);
611 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
612 FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
613 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
/* Write ICV */
616 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
617 LDST_SRCDST_BYTE_CONTEXT);
619 print_hex_dump_debug("aead givenc shdesc@" __stringify(__LINE__)": ",
620 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
623 EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
626 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
627 * @desc: pointer to buffer used for descriptor construction
628 * @cdata: pointer to block cipher transform definitions
629 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
630 * @ivsize: initialization vector size
631 * @icvsize: integrity check value (ICV) size (truncated or full)
632 * @is_qi: true when called from caam/qi
634 void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
635 unsigned int ivsize, unsigned int icvsize,
638 u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
639 *zero_assoc_jump_cmd2;
641 init_sh_desc(desc, HDR_SHARE_SERIAL);
643 /* skip key loading if they are loaded due to sharing */
644 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/* GCM key: inline immediate vs. DMA reference (else line not visible) */
646 if (cdata->key_inline)
647 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
648 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
650 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
652 set_jump_tgt_here(desc, key_jump_cmd);
654 /* class 1 operation */
655 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
/* QI-only section — see is_qi guard (not visible in this extract) */
661 /* REG3 = assoclen */
662 append_seq_load(desc, 4, LDST_CLASS_DECO |
663 LDST_SRCDST_WORD_DECO_MATH3 |
664 (4 << LDST_OFFSET_SHIFT));
/* Wait until the DECO load above has completed */
666 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
667 JUMP_COND_CALM | JUMP_COND_NCP |
668 JUMP_COND_NOP | JUMP_COND_NIP |
670 set_jump_tgt_here(desc, wait_load_cmd);
672 append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
675 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
679 /* if assoclen + cryptlen is ZERO, skip to ICV write */
680 zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
684 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
685 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
687 /* if assoclen is ZERO, skip reading the assoc data */
688 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
689 zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
692 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
694 /* skip assoc data */
695 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
697 /* cryptlen = seqinlen - assoclen */
698 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
700 /* if cryptlen is ZERO jump to zero-payload commands */
701 zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
704 /* read assoc data */
705 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
706 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
707 set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
709 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
711 /* write encrypted data */
712 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
714 /* read payload data */
715 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
716 FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
718 /* jump to ICV writing */
/* Relative jumps: operand is the word offset to the ICV-write command */
720 append_jump(desc, JUMP_TEST_ALL | 4);
722 append_jump(desc, JUMP_TEST_ALL | 2);
724 /* zero-payload commands */
725 set_jump_tgt_here(desc, zero_payload_jump_cmd);
727 /* read assoc data */
728 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
729 FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
731 /* jump to ICV writing */
732 append_jump(desc, JUMP_TEST_ALL | 2);
734 /* There is no input data */
735 set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
738 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
739 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
/* Write ICV */
743 append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
744 LDST_SRCDST_BYTE_CONTEXT);
746 print_hex_dump_debug("gcm enc shdesc@" __stringify(__LINE__)": ",
747 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
750 EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
753 * cnstr_shdsc_gcm_decap - gcm decapsulation shared descriptor
754 * @desc: pointer to buffer used for descriptor construction
755 * @cdata: pointer to block cipher transform definitions
756 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
757 * @ivsize: initialization vector size
758 * @icvsize: integrity check value (ICV) size (truncated or full)
759 * @is_qi: true when called from caam/qi
761 void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
762 unsigned int ivsize, unsigned int icvsize,
765 u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;
767 init_sh_desc(desc, HDR_SHARE_SERIAL);
769 /* skip key loading if they are loaded due to sharing */
770 key_jump_cmd = append_jump(desc, JUMP_JSL |
771 JUMP_TEST_ALL | JUMP_COND_SHRD);
/* GCM key: inline immediate vs. DMA reference (else line not visible) */
772 if (cdata->key_inline)
773 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
774 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
776 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
778 set_jump_tgt_here(desc, key_jump_cmd);
780 /* class 1 operation */
781 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
782 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
/* QI-only section — see is_qi guard (not visible in this extract) */
787 /* REG3 = assoclen */
788 append_seq_load(desc, 4, LDST_CLASS_DECO |
789 LDST_SRCDST_WORD_DECO_MATH3 |
790 (4 << LDST_OFFSET_SHIFT));
/* Wait until the DECO load above has completed */
792 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
793 JUMP_COND_CALM | JUMP_COND_NCP |
794 JUMP_COND_NOP | JUMP_COND_NIP |
796 set_jump_tgt_here(desc, wait_load_cmd);
798 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
799 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
802 /* if assoclen is ZERO, skip reading the assoc data */
803 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
804 zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
807 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
809 /* skip assoc data */
810 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
812 /* read assoc data */
813 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
814 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
816 set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
818 /* cryptlen = seqoutlen - assoclen */
819 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
821 /* jump to zero-payload command if cryptlen is zero */
822 zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
825 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
827 /* store encrypted data */
828 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
830 /* read payload data */
831 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
832 FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
834 /* zero-payload command */
835 set_jump_tgt_here(desc, zero_payload_jump_cmd);
/* Load ICV for class 1 verification (ICV_ON set above) */
838 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
839 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
841 print_hex_dump_debug("gcm dec shdesc@" __stringify(__LINE__)": ",
842 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
845 EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
848 * cnstr_shdsc_rfc4106_encap - IPSec ESP gcm encapsulation shared descriptor
850 * @desc: pointer to buffer used for descriptor construction
851 * @cdata: pointer to block cipher transform definitions
852 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
853 * @ivsize: initialization vector size
854 * @icvsize: integrity check value (ICV) size (truncated or full)
855 * @is_qi: true when called from caam/qi
857 * Input sequence: AAD | PTXT
858 * Output sequence: AAD | CTXT | ICV
859 * AAD length (assoclen), which includes the IV length, is available in Math3.
861 void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
862 unsigned int ivsize, unsigned int icvsize,
865 u32 *key_jump_cmd, *zero_cryptlen_jump_cmd, *skip_instructions;
866 init_sh_desc(desc, HDR_SHARE_SERIAL);
868 /* Skip key loading if it is loaded due to sharing */
869 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/* GCM key: inline immediate vs. DMA reference (else line not visible) */
871 if (cdata->key_inline)
872 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
873 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
875 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
877 set_jump_tgt_here(desc, key_jump_cmd);
879 /* Class 1 operation */
880 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
/* QI-only section — see is_qi guard (not visible in this extract) */
886 /* REG3 = assoclen */
887 append_seq_load(desc, 4, LDST_CLASS_DECO |
888 LDST_SRCDST_WORD_DECO_MATH3 |
889 (4 << LDST_OFFSET_SHIFT));
/* Wait until the DECO load above has completed */
891 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
892 JUMP_COND_CALM | JUMP_COND_NCP |
893 JUMP_COND_NOP | JUMP_COND_NIP |
895 set_jump_tgt_here(desc, wait_load_cmd);
897 /* Read salt and IV */
/* Salt is appended after the key in cdata->key_virt; 4 bytes */
898 append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
899 cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
901 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
902 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
/* AAD proper = assoclen - ivsize (assoclen includes the IV, see above) */
905 append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
906 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
909 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
911 /* Read cryptlen and set this value into VARSEQOUTLEN */
912 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
914 /* If cryptlen is ZERO jump to AAD command */
915 zero_cryptlen_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
919 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
920 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
922 /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
923 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA);
/* Skip the IV in the input sequence; then VSIL = cryptlen */
926 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
927 append_math_add(desc, VARSEQINLEN, VARSEQOUTLEN, REG0, CAAM_CMD_SZ);
929 /* Write encrypted data */
930 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
932 /* Read payload data */
933 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
934 FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
936 /* Jump instructions to avoid double reading of AAD */
937 skip_instructions = append_jump(desc, JUMP_TEST_ALL);
939 /* There is no input data, cryptlen = 0 */
940 set_jump_tgt_here(desc, zero_cryptlen_jump_cmd);
943 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
944 FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
946 set_jump_tgt_here(desc, skip_instructions);
/* Write ICV */
949 append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
950 LDST_SRCDST_BYTE_CONTEXT);
952 print_hex_dump_debug("rfc4106 enc shdesc@" __stringify(__LINE__)": ",
953 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
956 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap);
959 * cnstr_shdsc_rfc4106_decap - IPSec ESP gcm decapsulation shared descriptor
961 * @desc: pointer to buffer used for descriptor construction
962 * @cdata: pointer to block cipher transform definitions
963 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
964 * @ivsize: initialization vector size
965 * @icvsize: integrity check value (ICV) size (truncated or full)
966 * @is_qi: true when called from caam/qi
968 void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
969 unsigned int ivsize, unsigned int icvsize,
974 init_sh_desc(desc, HDR_SHARE_SERIAL);
976 /* Skip key loading if it is loaded due to sharing */
977 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
/* GCM key: inline immediate vs. DMA reference (else line not visible) */
979 if (cdata->key_inline)
980 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
981 cdata->keylen, CLASS_1 |
984 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
986 set_jump_tgt_here(desc, key_jump_cmd);
988 /* Class 1 operation */
989 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
990 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
/* QI-only section — see is_qi guard (not visible in this extract) */
995 /* REG3 = assoclen */
996 append_seq_load(desc, 4, LDST_CLASS_DECO |
997 LDST_SRCDST_WORD_DECO_MATH3 |
998 (4 << LDST_OFFSET_SHIFT));
/* Wait until the DECO load above has completed */
1000 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1001 JUMP_COND_CALM | JUMP_COND_NCP |
1002 JUMP_COND_NOP | JUMP_COND_NIP |
1004 set_jump_tgt_here(desc, wait_load_cmd);
1006 /* Read salt and IV */
/* Salt is appended after the key in cdata->key_virt; 4 bytes */
1007 append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1008 cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1010 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1011 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
/* AAD proper = assoclen - ivsize (assoclen includes the IV) */
1014 append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
1015 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1017 /* Read assoc data */
1018 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1019 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
/* Skip the IV in the input sequence (already consumed above) */
1022 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
1024 /* Will read cryptlen bytes */
1025 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
1027 /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
1028 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
1030 /* Skip assoc data */
1031 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
1033 /* Will write cryptlen bytes */
1034 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1036 /* Store payload data */
1037 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1039 /* Read encrypted data */
1040 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1041 FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
/* Load ICV for class 1 verification (ICV_ON set above) */
1044 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1045 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1047 print_hex_dump_debug("rfc4106 dec shdesc@" __stringify(__LINE__)": ",
1048 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1051 EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap);
/*
 * NOTE(review): damaged extraction - original file line numbers are fused
 * into the text and some lines are missing (comment delimiters, local
 * declarations, braces, else keywords, is_qi branches). Code tokens are
 * preserved verbatim; only comments were added.
 */
1054 * cnstr_shdsc_rfc4543_encap - IPSec ESP gmac encapsulation shared descriptor
1056 * @desc: pointer to buffer used for descriptor construction
1057 * @cdata: pointer to block cipher transform definitions
1058 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
1059 * @ivsize: initialization vector size
1060 * @icvsize: integrity check value (ICV) size (truncated or full)
1061 * @is_qi: true when called from caam/qi
1063 void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
1064 unsigned int ivsize, unsigned int icvsize,
1067 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
1069 init_sh_desc(desc, HDR_SHARE_SERIAL);
1071 /* Skip key loading if it is loaded due to sharing */
1072 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1074 if (cdata->key_inline)
1075 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1076 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
/* NOTE(review): an else presumably precedes this DMA-referenced key load */
1078 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
1079 KEY_DEST_CLASS_REG);
1080 set_jump_tgt_here(desc, key_jump_cmd);
1082 /* Class 1 operation */
1083 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1087 /* assoclen is not needed, skip it */
1088 append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
1090 /* Read salt and IV */
1091 append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1092 cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1094 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1095 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1098 /* assoclen + cryptlen = seqinlen */
1099 append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
1102 * MOVE_LEN opcode is not available in all SEC HW revisions,
1103 * thus need to do some magic, i.e. self-patch the descriptor
/* Self-patching pair: copy 6B of descriptor into MATH3, write 8B back */
1106 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1107 (0x6 << MOVE_LEN_SHIFT));
1108 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1109 (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
1111 /* Will read assoclen + cryptlen bytes */
1112 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1114 /* Will write assoclen + cryptlen bytes */
1115 append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1117 /* Read and write assoclen + cryptlen bytes */
1118 aead_append_src_dst(desc, FIFOLD_TYPE_AAD);
1120 set_move_tgt_here(desc, read_move_cmd);
1121 set_move_tgt_here(desc, write_move_cmd);
/* Auto info-FIFO off while manually routing payload to the output FIFO */
1122 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1123 /* Move payload data to OFIFO */
1124 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
/* Append the computed ICV (GMAC tag) from class-1 context to the output */
1127 append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
1128 LDST_SRCDST_BYTE_CONTEXT);
1130 print_hex_dump_debug("rfc4543 enc shdesc@" __stringify(__LINE__)": ",
1131 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1134 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap);
/*
 * NOTE(review): damaged extraction - original file line numbers are fused
 * into the text and some lines are missing (comment delimiters, local
 * declarations, braces, else keywords, is_qi branches). Code tokens are
 * preserved verbatim; only comments were added.
 */
1137 * cnstr_shdsc_rfc4543_decap - IPSec ESP gmac decapsulation shared descriptor
1139 * @desc: pointer to buffer used for descriptor construction
1140 * @cdata: pointer to block cipher transform definitions
1141 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
1142 * @ivsize: initialization vector size
1143 * @icvsize: integrity check value (ICV) size (truncated or full)
1144 * @is_qi: true when called from caam/qi
1146 void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
1147 unsigned int ivsize, unsigned int icvsize,
1150 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
1152 init_sh_desc(desc, HDR_SHARE_SERIAL);
1154 /* Skip key loading if it is loaded due to sharing */
1155 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1157 if (cdata->key_inline)
1158 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1159 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
/* NOTE(review): an else presumably precedes this DMA-referenced key load */
1161 append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
1162 KEY_DEST_CLASS_REG);
1163 set_jump_tgt_here(desc, key_jump_cmd);
1165 /* Class 1 operation */
1166 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1167 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1170 /* assoclen is not needed, skip it */
1171 append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
1173 /* Read salt and IV */
1174 append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
1175 cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
1177 append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
1178 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
1181 /* assoclen + cryptlen = seqoutlen */
1182 append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1185 * MOVE_LEN opcode is not available in all SEC HW revisions,
1186 * thus need to do some magic, i.e. self-patch the descriptor
/* Self-patching pair: copy 6B of descriptor into MATH3, write 8B back */
1189 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1190 (0x6 << MOVE_LEN_SHIFT));
1191 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1192 (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
1194 /* Will read assoclen + cryptlen bytes */
1195 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1197 /* Will write assoclen + cryptlen bytes */
1198 append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1200 /* Store payload data */
1201 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1203 /* In-snoop assoclen + cryptlen data */
1204 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
1205 FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);
1207 set_move_tgt_here(desc, read_move_cmd);
1208 set_move_tgt_here(desc, write_move_cmd);
/* Briefly disable the auto info FIFO to route payload to OFIFO manually */
1209 append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1210 /* Move payload data to OFIFO */
1211 append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1212 append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
/* Read ICV for hardware-side verification (OP_ALG_ICV_ON above) */
1215 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS1 |
1216 FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1218 print_hex_dump_debug("rfc4543 dec shdesc@" __stringify(__LINE__)": ",
1219 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1222 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
/*
 * NOTE(review): damaged extraction - original file line numbers are fused
 * into the text and some lines are missing (comment delimiters, local
 * declarations such as wait_load_cmd/nfifo, braces, if (encap) / else /
 * if (is_qi) / if (is_ipsec) wrappers). Code tokens are preserved verbatim;
 * only comments were added.
 */
1225 * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
1226 * IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
1227 * descriptor (non-protocol).
1228 * @desc: pointer to buffer used for descriptor construction
1229 * @cdata: pointer to block cipher transform definitions
1230 * Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ANDed with
1232 * @adata: pointer to authentication transform definitions
1233 * Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ANDed with
1235 * @ivsize: initialization vector size
1236 * @icvsize: integrity check value (ICV) size (truncated or full)
1237 * @encap: true if encapsulation, false if decapsulation
1238 * @is_qi: true when called from caam/qi
1240 void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
1241 struct alginfo *adata, unsigned int ivsize,
1242 unsigned int icvsize, const bool encap,
1245 u32 *key_jump_cmd, *wait_cmd;
/* IPsec variant is distinguished by a non-rfc7539 IV size */
1247 const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);
1249 /* Note: Context registers are saved. */
1250 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1252 /* skip key loading if they are loaded due to sharing */
1253 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1256 append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
1257 CLASS_1 | KEY_DEST_CLASS_REG);
1259 /* For IPsec load the salt from keymat in the context register */
1261 append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
1262 LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
1263 4 << LDST_OFFSET_SHIFT);
1265 set_jump_tgt_here(desc, key_jump_cmd);
1267 /* Class 2 and 1 operations: Poly & ChaCha */
/* Encapsulation arm: both algorithms run in encrypt direction */
1269 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
1271 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
/* Decapsulation arm: Poly1305 verifies the ICV in hardware */
1274 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
1275 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1276 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
/* IPsec keeps a 64-bit nonce at offset 8; rfc7539 IV lands at offset 4 */
1282 u32 ctx1_iv_off = is_ipsec ? 8 : 4;
1284 /* REG3 = assoclen */
1285 append_seq_load(desc, 4, LDST_CLASS_DECO |
1286 LDST_SRCDST_WORD_DECO_MATH3 |
1287 4 << LDST_OFFSET_SHIFT);
/* Stall until the DECO load above has actually landed in MATH3 */
1289 wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1290 JUMP_COND_CALM | JUMP_COND_NCP |
1291 JUMP_COND_NOP | JUMP_COND_NIP |
1293 set_jump_tgt_here(desc, wait_load_cmd);
1295 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
1296 LDST_SRCDST_BYTE_CONTEXT |
1297 ctx1_iv_off << LDST_OFFSET_SHIFT);
1302 * Read associated data from the input and send them to class1 and
1303 * class2 alignment blocks. From class1 send data to output fifo and
1304 * then write it to memory since we don't need to encrypt AD.
1306 nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
1307 NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
1308 append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
1309 LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);
1311 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
1312 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1313 append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
1314 FIFOLD_CLASS_CLASS1 | LDST_VLF);
1315 append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
1316 MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
1317 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
1319 /* IPsec - copy IV at the output */
1321 append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
/* Barrier jump: wait for all previous commands before proceeding */
1324 wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
1325 JUMP_COND_NOP | JUMP_TEST_ALL);
1326 set_jump_tgt_here(desc, wait_cmd);
/* Encap path: lengths derive from SEQINLEN; ICV is stored at the end */
1329 /* Read and write cryptlen bytes */
1330 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1331 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
1333 aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
1336 append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
1337 LDST_SRCDST_BYTE_CONTEXT);
/* Decap path: lengths derive from SEQOUTLEN; ICV is read and verified */
1339 /* Read and write cryptlen bytes */
1340 append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0,
1342 append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
1344 aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
1346 /* Load ICV for verification */
1347 append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
1348 FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
1351 print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
1352 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1355 EXPORT_SYMBOL(cnstr_shdsc_chachapoly);
1357 /* For skcipher encrypt and decrypt, read from req->src and write to req->dst */
1358 static inline void skcipher_append_src_dst(u32 *desc)
1360 append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1361 append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1362 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
1363 KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
1364 append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
1368 * cnstr_shdsc_skcipher_encap - skcipher encapsulation shared descriptor
1369 * @desc: pointer to buffer used for descriptor construction
1370 * @cdata: pointer to block cipher transform definitions
1371 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
1372 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
1373 * - OP_ALG_ALGSEL_CHACHA20
1374 * @ivsize: initialization vector size
1375 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1376 * @ctx1_iv_off: IV offset in CONTEXT1 register
1378 void cnstr_shdsc_skcipher_encap(u32 * const desc, struct alginfo *cdata,
1379 unsigned int ivsize, const bool is_rfc3686,
1380 const u32 ctx1_iv_off)
1383 u32 options = cdata->algtype | OP_ALG_AS_INIT | OP_ALG_ENCRYPT;
1384 bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
1385 OP_ALG_ALGSEL_CHACHA20);
1387 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1388 /* Skip if already shared */
1389 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1392 /* Load class1 key only */
1393 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1394 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1396 /* Load nonce into CONTEXT1 reg */
1398 const u8 *nonce = cdata->key_virt + cdata->keylen;
1400 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1401 LDST_CLASS_IND_CCB |
1402 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1403 append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1404 MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1405 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1408 set_jump_tgt_here(desc, key_jump_cmd);
1410 /* Load IV, if there is one */
1412 append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1413 LDST_CLASS_1_CCB | (ctx1_iv_off <<
1414 LDST_OFFSET_SHIFT));
1416 /* Load counter into CONTEXT1 reg */
1418 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1419 LDST_SRCDST_BYTE_CONTEXT |
1420 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1421 LDST_OFFSET_SHIFT));
1423 /* Load operation */
1425 options |= OP_ALG_AS_FINALIZE;
1426 append_operation(desc, options);
1428 /* Perform operation */
1429 skcipher_append_src_dst(desc);
1432 if (!is_chacha20 && ivsize)
1433 append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1434 LDST_CLASS_1_CCB | (ctx1_iv_off <<
1435 LDST_OFFSET_SHIFT));
1437 print_hex_dump_debug("skcipher enc shdesc@" __stringify(__LINE__)": ",
1438 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1441 EXPORT_SYMBOL(cnstr_shdsc_skcipher_encap);
1444 * cnstr_shdsc_skcipher_decap - skcipher decapsulation shared descriptor
1445 * @desc: pointer to buffer used for descriptor construction
1446 * @cdata: pointer to block cipher transform definitions
1447 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
1448 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
1449 * - OP_ALG_ALGSEL_CHACHA20
1450 * @ivsize: initialization vector size
1451 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
1452 * @ctx1_iv_off: IV offset in CONTEXT1 register
1454 void cnstr_shdsc_skcipher_decap(u32 * const desc, struct alginfo *cdata,
1455 unsigned int ivsize, const bool is_rfc3686,
1456 const u32 ctx1_iv_off)
1459 bool is_chacha20 = ((cdata->algtype & OP_ALG_ALGSEL_MASK) ==
1460 OP_ALG_ALGSEL_CHACHA20);
1462 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1463 /* Skip if already shared */
1464 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1467 /* Load class1 key only */
1468 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1469 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1471 /* Load nonce into CONTEXT1 reg */
1473 const u8 *nonce = cdata->key_virt + cdata->keylen;
1475 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1476 LDST_CLASS_IND_CCB |
1477 LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1478 append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO |
1479 MOVE_DEST_CLASS1CTX | (16 << MOVE_OFFSET_SHIFT) |
1480 (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1483 set_jump_tgt_here(desc, key_jump_cmd);
1485 /* Load IV, if there is one */
1487 append_seq_load(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1488 LDST_CLASS_1_CCB | (ctx1_iv_off <<
1489 LDST_OFFSET_SHIFT));
1491 /* Load counter into CONTEXT1 reg */
1493 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1494 LDST_SRCDST_BYTE_CONTEXT |
1495 ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1496 LDST_OFFSET_SHIFT));
1498 /* Choose operation */
1500 append_operation(desc, cdata->algtype | OP_ALG_AS_INIT |
1503 append_dec_op1(desc, cdata->algtype);
1505 /* Perform operation */
1506 skcipher_append_src_dst(desc);
1509 if (!is_chacha20 && ivsize)
1510 append_seq_store(desc, ivsize, LDST_SRCDST_BYTE_CONTEXT |
1511 LDST_CLASS_1_CCB | (ctx1_iv_off <<
1512 LDST_OFFSET_SHIFT));
1514 print_hex_dump_debug("skcipher dec shdesc@" __stringify(__LINE__)": ",
1515 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1518 EXPORT_SYMBOL(cnstr_shdsc_skcipher_decap);
1521 * cnstr_shdsc_xts_skcipher_encap - xts skcipher encapsulation shared descriptor
1522 * @desc: pointer to buffer used for descriptor construction
1523 * @cdata: pointer to block cipher transform definitions
1524 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
1526 void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
1529 * Set sector size to a big value, practically disabling
1530 * sector size segmentation in xts implementation. We cannot
1531 * take full advantage of this HW feature with existing
1532 * crypto API / dm-crypt SW architecture.
1534 __be64 sector_size = cpu_to_be64(BIT(15));
1537 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1538 /* Skip if already shared */
1539 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1542 /* Load class1 keys only */
1543 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1544 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1546 /* Load sector size with index 40 bytes (0x28) */
1547 append_load_as_imm(desc, (void *)§or_size, 8, LDST_CLASS_1_CCB |
1548 LDST_SRCDST_BYTE_CONTEXT |
1549 (0x28 << LDST_OFFSET_SHIFT));
1551 set_jump_tgt_here(desc, key_jump_cmd);
1554 * create sequence for loading the sector index / 16B tweak value
1555 * Lower 8B of IV - sector index / tweak lower half
1556 * Upper 8B of IV - upper half of 16B tweak
1558 append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1559 (0x20 << LDST_OFFSET_SHIFT));
1560 append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1561 (0x30 << LDST_OFFSET_SHIFT));
1563 /* Load operation */
1564 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
1567 /* Perform operation */
1568 skcipher_append_src_dst(desc);
1570 /* Store lower 8B and upper 8B of IV */
1571 append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1572 (0x20 << LDST_OFFSET_SHIFT));
1573 append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1574 (0x30 << LDST_OFFSET_SHIFT));
1576 print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
1577 ": ", DUMP_PREFIX_ADDRESS, 16, 4,
1578 desc, desc_bytes(desc), 1);
1580 EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_encap);
1583 * cnstr_shdsc_xts_skcipher_decap - xts skcipher decapsulation shared descriptor
1584 * @desc: pointer to buffer used for descriptor construction
1585 * @cdata: pointer to block cipher transform definitions
1586 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_XTS.
1588 void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
1591 * Set sector size to a big value, practically disabling
1592 * sector size segmentation in xts implementation. We cannot
1593 * take full advantage of this HW feature with existing
1594 * crypto API / dm-crypt SW architecture.
1596 __be64 sector_size = cpu_to_be64(BIT(15));
1599 init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1600 /* Skip if already shared */
1601 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1604 /* Load class1 key only */
1605 append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
1606 cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
1608 /* Load sector size with index 40 bytes (0x28) */
1609 append_load_as_imm(desc, (void *)§or_size, 8, LDST_CLASS_1_CCB |
1610 LDST_SRCDST_BYTE_CONTEXT |
1611 (0x28 << LDST_OFFSET_SHIFT));
1613 set_jump_tgt_here(desc, key_jump_cmd);
1616 * create sequence for loading the sector index / 16B tweak value
1617 * Lower 8B of IV - sector index / tweak lower half
1618 * Upper 8B of IV - upper half of 16B tweak
1620 append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1621 (0x20 << LDST_OFFSET_SHIFT));
1622 append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1623 (0x30 << LDST_OFFSET_SHIFT));
1624 /* Load operation */
1625 append_dec_op1(desc, cdata->algtype);
1627 /* Perform operation */
1628 skcipher_append_src_dst(desc);
1630 /* Store lower 8B and upper 8B of IV */
1631 append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1632 (0x20 << LDST_OFFSET_SHIFT));
1633 append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1634 (0x30 << LDST_OFFSET_SHIFT));
1636 print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
1637 ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
1638 desc_bytes(desc), 1);
1640 EXPORT_SYMBOL(cnstr_shdsc_xts_skcipher_decap);
/* Standard kernel module metadata for this shared-descriptor library. */
1642 MODULE_LICENSE("GPL");
1643 MODULE_DESCRIPTION("FSL CAAM descriptor support");
1644 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");