/*
 * This file is part of the Chelsio T6 Crypto driver for Linux.
 *
 * Copyright (c) 2003-2016 Chelsio Communications, Inc. All rights reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses.  You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the
 * OpenIB.org BSD license below:
 *
 *     Redistribution and use in source and binary forms, with or
 *     without modification, are permitted provided that the following
 *     conditions are met:
 *
 *      - Redistributions of source code must retain the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer.
 *
 *      - Redistributions in binary form must reproduce the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer in the documentation and/or other materials
 *        provided with the distribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Written and Maintained by:
 *	Manoj Malviya (manojmalviya@chelsio.com)
 *	Atul Gupta (atul.gupta@chelsio.com)
 *	Jitendra Lulla (jlulla@chelsio.com)
 *	Yeshaswi M R Gowda (yeshaswi@chelsio.com)
 *	Harsh Jain (harsh@chelsio.com)
 */

#define pr_fmt(fmt) "chcr:" fmt

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/skbuff.h>
#include <linux/rtnetlink.h>
#include <linux/highmem.h>
#include <linux/scatterlist.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <crypto/sha.h>
#include <crypto/authenc.h>
#include <crypto/ctr.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/null.h>
#include <crypto/internal/skcipher.h>
#include <crypto/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "t4fw_api.h"
#include "t4_msg.h"
#include "chcr_core.h"
#include "chcr_algo.h"
#include "chcr_crypto.h"

static inline struct chcr_aead_ctx *AEAD_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->aeadctx;
}

static inline struct ablk_ctx *ABLK_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->ablkctx;
}

static inline struct hmac_ctx *HMAC_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->hmacctx;
}

static inline struct chcr_gcm_ctx *GCM_CTX(struct chcr_aead_ctx *gctx)
{
	return gctx->ctx->gcm;
}

static inline struct chcr_authenc_ctx *AUTHENC_CTX(struct chcr_aead_ctx *gctx)
{
	return gctx->ctx->authenc;
}

static inline struct uld_ctx *ULD_CTX(struct chcr_context *ctx)
{
	return ctx->dev->u_ctx;
}

static inline int is_ofld_imm(const struct sk_buff *skb)
{
	return (skb->len <= CRYPTO_MAX_IMM_TX_PKT_LEN);
}

/*
 *	sgl_len - calculates the size of an SGL of the given capacity
 *	@n: the number of SGL entries
 *	Calculates the number of flits needed for a scatter/gather list that
 *	can hold the given number of entries.
 */
static inline unsigned int sgl_len(unsigned int n)
{
	n--;
	return (3 * n) / 2 + (n & 1) + 2;
}

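/*
 * Illustrative note (added for clarity, not part of the original driver
 * source): for n = 8 SGL entries, n-- gives 7, so sgl_len() returns
 * (3 * 7) / 2 + (7 & 1) + 2 = 13 flits, i.e. 13 * 8 = 104 bytes of
 * descriptor space.
 */
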
static void chcr_verify_tag(struct aead_request *req, u8 *input, int *err)
{
	u8 temp[SHA512_DIGEST_SIZE];
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int authsize = crypto_aead_authsize(tfm);
	struct cpl_fw6_pld *fw6_pld;
	int cmp = 0;

	fw6_pld = (struct cpl_fw6_pld *)input;
	if ((get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) ||
	    (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_GCM)) {
		cmp = crypto_memneq(&fw6_pld->data[2], (fw6_pld + 1), authsize);
	} else {
		sg_pcopy_to_buffer(req->src, sg_nents(req->src), temp,
				   authsize, req->assoclen +
				   req->cryptlen - authsize);
		cmp = crypto_memneq(temp, (fw6_pld + 1), authsize);
	}
	if (cmp)
		*err = -EBADMSG;
	else
		*err = 0;
}

/*
 *	chcr_handle_resp - Unmap the DMA buffers associated with the request
 *	@req: crypto request
 */
int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
		     int err)
{
	struct crypto_tfm *tfm = req->tfm;
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_req_ctx ctx_req;
	unsigned int digestsize, updated_digestsize;
	struct adapter *adap = padap(ctx->dev);

	switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		ctx_req.req.aead_req = aead_request_cast(req);
		ctx_req.ctx.reqctx = aead_request_ctx(ctx_req.req.aead_req);
		dma_unmap_sg(&u_ctx->lldi.pdev->dev, ctx_req.ctx.reqctx->dst,
			     ctx_req.ctx.reqctx->dst_nents, DMA_FROM_DEVICE);
		if (ctx_req.ctx.reqctx->skb) {
			kfree_skb(ctx_req.ctx.reqctx->skb);
			ctx_req.ctx.reqctx->skb = NULL;
		}
		free_new_sg(ctx_req.ctx.reqctx->newdstsg);
		ctx_req.ctx.reqctx->newdstsg = NULL;
		if (ctx_req.ctx.reqctx->verify == VERIFY_SW) {
			chcr_verify_tag(ctx_req.req.aead_req, input,
					&err);
			ctx_req.ctx.reqctx->verify = VERIFY_HW;
		}
		ctx_req.req.aead_req->base.complete(req, err);
		break;

	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		err = chcr_handle_cipher_resp(ablkcipher_request_cast(req),
					      input, err);
		break;

	case CRYPTO_ALG_TYPE_AHASH:
		ctx_req.req.ahash_req = ahash_request_cast(req);
		ctx_req.ctx.ahash_ctx =
			ahash_request_ctx(ctx_req.req.ahash_req);
		digestsize =
			crypto_ahash_digestsize(crypto_ahash_reqtfm(
							ctx_req.req.ahash_req));
		updated_digestsize = digestsize;
		if (digestsize == SHA224_DIGEST_SIZE)
			updated_digestsize = SHA256_DIGEST_SIZE;
		else if (digestsize == SHA384_DIGEST_SIZE)
			updated_digestsize = SHA512_DIGEST_SIZE;
		if (ctx_req.ctx.ahash_ctx->skb) {
			kfree_skb(ctx_req.ctx.ahash_ctx->skb);
			ctx_req.ctx.ahash_ctx->skb = NULL;
		}
		if (ctx_req.ctx.ahash_ctx->result == 1) {
			ctx_req.ctx.ahash_ctx->result = 0;
			memcpy(ctx_req.req.ahash_req->result, input +
			       sizeof(struct cpl_fw6_pld),
			       digestsize);
		} else {
			memcpy(ctx_req.ctx.ahash_ctx->partial_hash, input +
			       sizeof(struct cpl_fw6_pld),
			       updated_digestsize);
		}
		ctx_req.req.ahash_req->base.complete(req, err);
		break;
	}
	atomic_inc(&adap->chcr_stats.complete);
	return err;
}

/*
 *	calc_tx_flits_ofld - calculate # of flits for an offload packet
 *	@skb: the packet
 *	Returns the number of flits needed for the given offload packet.
 *	These packets are already fully constructed and no additional headers
 *	will be added.
 */
static inline unsigned int calc_tx_flits_ofld(const struct sk_buff *skb)
{
	unsigned int flits, cnt;

	if (is_ofld_imm(skb))
		return DIV_ROUND_UP(skb->len, 8);

	flits = skb_transport_offset(skb) / 8;   /* headers */
	cnt = skb_shinfo(skb)->nr_frags;
	if (skb_tail_pointer(skb) != skb_transport_header(skb))
		cnt++;
	return flits + sgl_len(cnt);
}

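/*
 * Illustrative note (added for clarity, not part of the original driver
 * source): an immediate packet with skb->len = 100 costs
 * DIV_ROUND_UP(100, 8) = 13 flits; a non-immediate packet instead pays
 * for its headers plus sgl_len() flits for the fragment list.
 */
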
static inline void get_aes_decrypt_key(unsigned char *dec_key,
				       const unsigned char *key,
				       unsigned int keylength)
{
	u32 temp;
	u32 w_ring[MAX_NK];
	int i, j, k;
	u8 nr, nk;

	switch (keylength) {
	case AES_KEYLENGTH_128BIT:
		nk = KEYLENGTH_4BYTES;
		nr = NUMBER_OF_ROUNDS_10;
		break;
	case AES_KEYLENGTH_192BIT:
		nk = KEYLENGTH_6BYTES;
		nr = NUMBER_OF_ROUNDS_12;
		break;
	case AES_KEYLENGTH_256BIT:
		nk = KEYLENGTH_8BYTES;
		nr = NUMBER_OF_ROUNDS_14;
		break;
	default:
		return;
	}
	for (i = 0; i < nk; i++)
		w_ring[i] = be32_to_cpu(*(u32 *)&key[4 * i]);

	i = 0;
	temp = w_ring[nk - 1];
	while (i + nk < (nr + 1) * 4) {
		if (!(i % nk)) {
			/* RotWord(temp) */
			temp = (temp << 8) | (temp >> 24);
			temp = aes_ks_subword(temp);
			temp ^= round_constant[i / nk];
		} else if (nk == 8 && (i % 4 == 0)) {
			temp = aes_ks_subword(temp);
		}
		w_ring[i % nk] ^= temp;
		temp = w_ring[i % nk];
		i++;
	}
	i--;
	for (k = 0, j = i % nk; k < nk; k++) {
		*((u32 *)dec_key + k) = htonl(w_ring[j]);
		j--;
		if (j < 0)
			j += nk;
	}
}

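/*
 * Note (added for clarity, not part of the original driver source): the
 * while loop above runs the standard AES key expansion, and the final
 * for loop copies the last Nk round-key words, in reverse ring order,
 * into dec_key. This "reverse round key" lets the hardware start
 * decryption from the last round without expanding the key itself.
 */
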
static struct crypto_shash *chcr_alloc_shash(unsigned int ds)
{
	struct crypto_shash *base_hash = ERR_PTR(-EINVAL);

	switch (ds) {
	case SHA1_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha1", 0, 0);
		break;
	case SHA224_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha224", 0, 0);
		break;
	case SHA256_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha256", 0, 0);
		break;
	case SHA384_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha384", 0, 0);
		break;
	case SHA512_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha512", 0, 0);
		break;
	}

	return base_hash;
}

static int chcr_compute_partial_hash(struct shash_desc *desc,
				     char *iopad, char *result_hash,
				     int digest_size)
{
	struct sha1_state sha1_st;
	struct sha256_state sha256_st;
	struct sha512_state sha512_st;
	int error = 0;

	if (digest_size == SHA1_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA1_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha1_st);
		memcpy(result_hash, sha1_st.state, SHA1_DIGEST_SIZE);
	} else if (digest_size == SHA224_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha256_st);
		memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);
	} else if (digest_size == SHA256_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha256_st);
		memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);
	} else if (digest_size == SHA384_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha512_st);
		memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
	} else if (digest_size == SHA512_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha512_st);
		memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
	} else {
		error = -EINVAL;
		pr_err("Unknown digest size %d\n", digest_size);
	}
	return error;
}

static void chcr_change_order(char *buf, int ds)
{
	int i;

	if (ds == SHA512_DIGEST_SIZE) {
		for (i = 0; i < (ds / sizeof(u64)); i++)
			*((__be64 *)buf + i) =
				cpu_to_be64(*((u64 *)buf + i));
	} else {
		for (i = 0; i < (ds / sizeof(u32)); i++)
			*((__be32 *)buf + i) =
				cpu_to_be32(*((u32 *)buf + i));
	}
}

static inline int is_hmac(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(__crypto_ahash_alg(alg), struct chcr_alg_template,
			     alg.hash);
	if (chcr_crypto_alg->type == CRYPTO_ALG_TYPE_HMAC)
		return 1;
	return 0;
}

static void write_phys_cpl(struct cpl_rx_phys_dsgl *phys_cpl,
			   struct scatterlist *sg,
			   struct phys_sge_parm *sg_param,
			   int pci_chan_id)
{
	struct phys_sge_pairs *to;
	unsigned int len = 0, left_size = sg_param->obsize;
	unsigned int nents = sg_param->nents, i, j = 0;

	phys_cpl->op_to_tid = htonl(CPL_RX_PHYS_DSGL_OPCODE_V(CPL_RX_PHYS_DSGL)
				    | CPL_RX_PHYS_DSGL_ISRDMA_V(0));
	phys_cpl->pcirlxorder_to_noofsgentr =
		htonl(CPL_RX_PHYS_DSGL_PCIRLXORDER_V(0) |
		      CPL_RX_PHYS_DSGL_PCINOSNOOP_V(0) |
		      CPL_RX_PHYS_DSGL_PCITPHNTENB_V(0) |
		      CPL_RX_PHYS_DSGL_PCITPHNT_V(0) |
		      CPL_RX_PHYS_DSGL_DCAID_V(0) |
		      CPL_RX_PHYS_DSGL_NOOFSGENTR_V(nents));
	phys_cpl->rss_hdr_int.opcode = CPL_RX_PHYS_ADDR;
	phys_cpl->rss_hdr_int.qid = htons(sg_param->qid);
	phys_cpl->rss_hdr_int.hash_val = 0;
	phys_cpl->rss_hdr_int.channel = pci_chan_id;
	to = (struct phys_sge_pairs *)((unsigned char *)phys_cpl +
				       sizeof(struct cpl_rx_phys_dsgl));
	for (i = 0; nents && left_size; to++) {
		for (j = 0; j < 8 && nents && left_size; j++, nents--) {
			len = min(left_size, sg_dma_len(sg));
			to->len[j] = htons(len);
			to->addr[j] = cpu_to_be64(sg_dma_address(sg));
			left_size -= len;
			sg = sg_next(sg);
		}
	}
}

static inline int map_writesg_phys_cpl(struct device *dev,
					struct cpl_rx_phys_dsgl *phys_cpl,
					struct scatterlist *sg,
					struct phys_sge_parm *sg_param,
					int pci_chan_id)
{
	if (!sg || !sg_param->nents)
		return -EINVAL;

	sg_param->nents = dma_map_sg(dev, sg, sg_param->nents, DMA_FROM_DEVICE);
	if (sg_param->nents == 0) {
		pr_err("CHCR : DMA mapping failed\n");
		return -EINVAL;
	}
	write_phys_cpl(phys_cpl, sg, sg_param, pci_chan_id);
	return 0;
}

static inline int get_aead_subtype(struct crypto_aead *aead)
{
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.aead);
	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

static inline int get_cryptoalg_subtype(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.crypto);

	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

static inline void write_buffer_to_skb(struct sk_buff *skb,
					unsigned int *frags,
					char *bfr,
					u8 bfr_len)
{
	skb->len += bfr_len;
	skb->data_len += bfr_len;
	skb->truesize += bfr_len;
	get_page(virt_to_page(bfr));
	skb_fill_page_desc(skb, *frags, virt_to_page(bfr),
			   offset_in_page(bfr), bfr_len);
	(*frags)++;
}

static inline void
write_sg_to_skb(struct sk_buff *skb, unsigned int *frags,
		struct scatterlist *sg, unsigned int count)
{
	struct page *spage;
	unsigned int page_len;

	skb->len += count;
	skb->data_len += count;
	skb->truesize += count;

	while (count > 0) {
		if (!sg || (!(sg->length)))
			break;
		spage = sg_page(sg);
		get_page(spage);
		page_len = min(sg->length, count);
		skb_fill_page_desc(skb, *frags, spage, sg->offset, page_len);
		(*frags)++;
		count -= page_len;
		sg = sg_next(sg);
	}
}

static int cxgb4_is_crypto_q_full(struct net_device *dev, unsigned int idx)
{
	struct adapter *adap = netdev2adap(dev);
	struct sge_uld_txq_info *txq_info =
		adap->sge.uld_txq_info[CXGB4_TX_CRYPTO];
	struct sge_uld_txq *txq;
	int ret = 0;

	local_bh_disable();
	txq = &txq_info->uldtxq[idx];
	spin_lock(&txq->sendq.lock);
	if (txq->full)
		ret = -1;
	spin_unlock(&txq->sendq.lock);
	local_bh_enable();
	return ret;
}

static int generate_copy_rrkey(struct ablk_ctx *ablkctx,
			       struct _key_ctx *key_ctx)
{
	if (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) {
		memcpy(key_ctx->key, ablkctx->rrkey, ablkctx->enckey_len);
	} else {
		memcpy(key_ctx->key,
		       ablkctx->key + (ablkctx->enckey_len >> 1),
		       ablkctx->enckey_len >> 1);
		memcpy(key_ctx->key + (ablkctx->enckey_len >> 1),
		       ablkctx->rrkey, ablkctx->enckey_len >> 1);
	}
	return 0;
}

static int chcr_sg_ent_in_wr(struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int minsg,
			     unsigned int space,
			     short int *sent,
			     short int *dent)
{
	int srclen = 0, dstlen = 0;
	int srcsg = minsg, dstsg = 0;

	*sent = 0;
	*dent = 0;
	while (src && dst && ((srcsg + 1) <= MAX_SKB_FRAGS) &&
	       space > (sgl_ent_len[srcsg + 1] + dsgl_ent_len[dstsg])) {
		srclen += src->length;
		srcsg++;
		while (dst && ((dstsg + 1) <= MAX_DSGL_ENT) &&
		       space > (sgl_ent_len[srcsg] + dsgl_ent_len[dstsg + 1])) {
			if (srclen <= dstlen)
				break;
			dstlen += dst->length;
			dst = sg_next(dst);
			dstsg++;
		}
		src = sg_next(src);
	}
	*sent = srcsg - minsg;
	*dent = dstsg;
	return min(srclen, dstlen);
}

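/*
 * Note (added for clarity, not part of the original driver source): the
 * walk above admits another source entry only while the work request
 * still has room for one more TX SGL entry alongside the current number
 * of destination DSGL entries; the inner loop then advances the
 * destination list until it covers at least as many bytes as the source
 * side. The return value is the byte count both lists can carry within
 * 'space'.
 */
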
static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
				u32 flags,
				struct scatterlist *src,
				struct scatterlist *dst,
				unsigned int nbytes,
				u8 *iv,
				unsigned short op_type)
{
	int err;

	SKCIPHER_REQUEST_ON_STACK(subreq, cipher);
	skcipher_request_set_tfm(subreq, cipher);
	skcipher_request_set_callback(subreq, flags, NULL, NULL);
	skcipher_request_set_crypt(subreq, src, dst,
				   nbytes, iv);

	err = op_type ? crypto_skcipher_decrypt(subreq) :
		crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	return err;
}

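/*
 * Illustrative usage (added for clarity, not part of the original driver
 * source): callers hand a request to the software skcipher when it
 * cannot be carved into hardware work requests, along the lines of:
 *
 *	err = chcr_cipher_fallback(ablkctx->sw_cipher, req->base.flags,
 *				   req->src, req->dst, req->nbytes,
 *				   req->info, op_type);
 */
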
static inline void create_wreq(struct chcr_context *ctx,
			       struct chcr_wr *chcr_req,
			       void *req, struct sk_buff *skb,
			       int kctx_len, int hash_sz,
			       int is_iv,
			       unsigned int sc_len,
			       unsigned int lcb)
{
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	int iv_loc = IV_DSGL;
	int qid = u_ctx->lldi.rxq_ids[ctx->rx_qidx];
	unsigned int immdatalen = 0, nr_frags = 0;

	if (is_ofld_imm(skb)) {
		immdatalen = skb->data_len;
		iv_loc = IV_IMMEDIATE;
	} else {
		nr_frags = skb_shinfo(skb)->nr_frags;
	}

	chcr_req->wreq.op_to_cctx_size = FILL_WR_OP_CCTX_SIZE(immdatalen,
				((sizeof(chcr_req->key_ctx) + kctx_len) >> 4));
	chcr_req->wreq.pld_size_hash_size =
		htonl(FW_CRYPTO_LOOKASIDE_WR_PLD_SIZE_V(sgl_lengths[nr_frags]) |
		      FW_CRYPTO_LOOKASIDE_WR_HASH_SIZE_V(hash_sz));
	chcr_req->wreq.len16_pkd =
		htonl(FW_CRYPTO_LOOKASIDE_WR_LEN16_V(DIV_ROUND_UP(
				    (calc_tx_flits_ofld(skb) * 8), 16)));
	chcr_req->wreq.cookie = cpu_to_be64((uintptr_t)req);
	chcr_req->wreq.rx_chid_to_rx_q_id =
		FILL_WR_RX_Q_ID(ctx->dev->rx_channel_id, qid,
				is_iv ? iv_loc : IV_NOP, !!lcb,
				ctx->tx_qidx);

	chcr_req->ulptx.cmd_dest = FILL_ULPTX_CMD_DEST(ctx->tx_chan_id,
						       qid);
	chcr_req->ulptx.len = htonl((DIV_ROUND_UP((calc_tx_flits_ofld(skb) * 8),
					16) - ((sizeof(chcr_req->wreq)) >> 4)));

	chcr_req->sc_imm.cmd_more = FILL_CMD_MORE(immdatalen);
	chcr_req->sc_imm.len = cpu_to_be32(sizeof(struct cpl_tx_sec_pdu) +
					   sizeof(chcr_req->key_ctx) +
					   kctx_len + sc_len + immdatalen);
}

/*
 *	create_cipher_wr - form the WR for cipher operations
 *	@req: cipher req.
 *	@ctx: crypto driver context of the request.
 *	@qid: ingress qid where response of this WR should be received.
 *	@op_type: encryption or decryption
 */
static struct sk_buff *create_cipher_wr(struct cipher_wr_param *wrparam)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(wrparam->req);
	struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct chcr_blkcipher_req_ctx *reqctx =
		ablkcipher_request_ctx(wrparam->req);
	struct phys_sge_parm sg_param;
	unsigned int frags = 0, transhdr_len, phys_dsgl;
	int error;
	unsigned int ivsize = AES_BLOCK_SIZE, kctx_len;
	gfp_t flags = wrparam->req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
			GFP_KERNEL : GFP_ATOMIC;
	struct adapter *adap = padap(ctx->dev);

	phys_dsgl = get_space_for_phys_dsgl(reqctx->dst_nents);

	kctx_len = (DIV_ROUND_UP(ablkctx->enckey_len, 16) * 16);
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, phys_dsgl);
	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
	if (!skb) {
		error = -ENOMEM;
		goto err;
	}
	skb_reserve(skb, sizeof(struct sge_opaque_hdr));
	chcr_req = __skb_put_zero(skb, transhdr_len);
	chcr_req->sec_cpl.op_ivinsrtofst =
		FILL_SEC_CPL_OP_IVINSR(ctx->dev->rx_channel_id, 2, 1);

	chcr_req->sec_cpl.pldlen = htonl(ivsize + wrparam->bytes);
	chcr_req->sec_cpl.aadstart_cipherstop_hi =
			FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, ivsize + 1, 0);

	chcr_req->sec_cpl.cipherstop_lo_authinsert =
			FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0);
	chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, 0,
							 ablkctx->ciph_mode,
							 0, 0, ivsize >> 1);
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 0,
							  0, 1, phys_dsgl);

	chcr_req->key_ctx.ctx_hdr = ablkctx->key_ctx_hdr;
	if ((reqctx->op == CHCR_DECRYPT_OP) &&
	    (!(get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm)) ==
	       CRYPTO_ALG_SUB_TYPE_CTR)) &&
	    (!(get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm)) ==
	       CRYPTO_ALG_SUB_TYPE_CTR_RFC3686))) {
		generate_copy_rrkey(ablkctx, &chcr_req->key_ctx);
	} else {
		if ((ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) ||
		    (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CTR)) {
			memcpy(chcr_req->key_ctx.key, ablkctx->key,
			       ablkctx->enckey_len);
		} else {
			memcpy(chcr_req->key_ctx.key, ablkctx->key +
			       (ablkctx->enckey_len >> 1),
			       ablkctx->enckey_len >> 1);
			memcpy(chcr_req->key_ctx.key +
			       (ablkctx->enckey_len >> 1),
			       ablkctx->key,
			       ablkctx->enckey_len >> 1);
		}
	}
	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	sg_param.nents = reqctx->dst_nents;
	sg_param.obsize = wrparam->bytes;
	sg_param.qid = wrparam->qid;
	error = map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl,
				     reqctx->dst, &sg_param,
				     ctx->pci_chan_id);
	if (error)
		goto map_fail1;

	skb_set_transport_header(skb, transhdr_len);
	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
	write_sg_to_skb(skb, &frags, wrparam->srcsg, wrparam->bytes);
	atomic_inc(&adap->chcr_stats.cipher_rqst);
	create_wreq(ctx, chcr_req, &(wrparam->req->base), skb, kctx_len, 0, 1,
		    sizeof(struct cpl_rx_phys_dsgl) + phys_dsgl,
		    ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC);
	reqctx->skb = skb;
	skb_get(skb);
	return skb;
map_fail1:
	kfree_skb(skb);
err:
	return ERR_PTR(error);
}

static inline int chcr_keyctx_ck_size(unsigned int keylen)
{
	int ck_size = 0;

	if (keylen == AES_KEYSIZE_128)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	else if (keylen == AES_KEYSIZE_192)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	else if (keylen == AES_KEYSIZE_256)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	else
		ck_size = 0;

	return ck_size;
}

static int chcr_cipher_fallback_setkey(struct crypto_ablkcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct chcr_context *ctx = crypto_ablkcipher_ctx(cipher);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	int err = 0;

	crypto_skcipher_clear_flags(ablkctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ablkctx->sw_cipher, cipher->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |=
		crypto_skcipher_get_flags(ablkctx->sw_cipher) &
		CRYPTO_TFM_RES_MASK;
	return err;
}

static int chcr_aes_cbc_setkey(struct crypto_ablkcipher *cipher,
			       const u8 *key,
			       unsigned int keylen)
{
	struct chcr_context *ctx = crypto_ablkcipher_ctx(cipher);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;

	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192 ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, keylen << 3);
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CBC;
	return 0;
badkey_err:
	crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	ablkctx->enckey_len = 0;

	return err;
}

static int chcr_aes_ctr_setkey(struct crypto_ablkcipher *cipher,
			       const u8 *key,
			       unsigned int keylen)
{
	struct chcr_context *ctx = crypto_ablkcipher_ctx(cipher);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;
	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = (ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192) ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CTR;

	return 0;
badkey_err:
	crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	ablkctx->enckey_len = 0;

	return err;
}

static int chcr_aes_rfc3686_setkey(struct crypto_ablkcipher *cipher,
				   const u8 *key,
				   unsigned int keylen)
{
	struct chcr_context *ctx = crypto_ablkcipher_ctx(cipher);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;
	memcpy(ablkctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;
	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;

	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = (ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192) ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CTR;

	return 0;
badkey_err:
	crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	ablkctx->enckey_len = 0;

	return err;
}

static void ctr_add_iv(u8 *dstiv, u8 *srciv, u32 add)
{
	unsigned int size = AES_BLOCK_SIZE;
	__be32 *b = (__be32 *)(dstiv + size);
	u32 c, prev;

	memcpy(dstiv, srciv, AES_BLOCK_SIZE);
	for (; size >= 4; size -= 4) {
		prev = be32_to_cpu(*--b);
		c = prev + add;
		*b = cpu_to_be32(c);
		if (prev < c)
			break;
		add = 1;
	}
}

static unsigned int adjust_ctr_overflow(u8 *iv, u32 bytes)
{
	__be32 *b = (__be32 *)(iv + AES_BLOCK_SIZE);
	u64 c;
	u32 temp = be32_to_cpu(*--b);

	temp = ~temp;
	c = (u64)temp + 1; /* number of blocks that can be processed
			    * without counter overflow
			    */
	if ((bytes / AES_BLOCK_SIZE) > c)
		bytes = c * AES_BLOCK_SIZE;
	return bytes;
}

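/*
 * Worked example (added for clarity, not part of the original driver
 * source): if the low 32 bits of the counter are 0xFFFFFFFD, then
 * temp = ~0xFFFFFFFD = 2 and c = 3, i.e. three more blocks fit before
 * the 32-bit counter wraps; a request for more than 48 bytes is
 * clamped to 3 * AES_BLOCK_SIZE = 48 bytes.
 */
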
static int chcr_update_tweak(struct ablkcipher_request *req, u8 *iv)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	struct crypto_cipher *cipher;
	int ret, i;
	u8 *key;
	unsigned int keylen;

	cipher = ablkctx->aes_generic;
	memcpy(iv, req->info, AES_BLOCK_SIZE);

	keylen = ablkctx->enckey_len / 2;
	key = ablkctx->key + keylen;
	ret = crypto_cipher_setkey(cipher, key, keylen);
	if (ret)
		goto out;

	crypto_cipher_encrypt_one(cipher, iv, iv);
	for (i = 0; i < (reqctx->processed / AES_BLOCK_SIZE); i++)
		gf128mul_x_ble((le128 *)iv, (le128 *)iv);

	crypto_cipher_decrypt_one(cipher, iv, iv);
out:
	return ret;
}

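/*
 * Note (added for clarity, not part of the original driver source): this
 * recomputes the XTS tweak in software: encrypt the IV with the second
 * half of the key, multiply by x in GF(2^128) once per block already
 * processed (gf128mul_x_ble), then decrypt the result so the hardware,
 * which encrypts the IV again, re-derives the advanced tweak for the
 * remaining blocks.
 */
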
static int chcr_update_cipher_iv(struct ablkcipher_request *req,
				 struct cpl_fw6_pld *fw6_pld, u8 *iv)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	int subtype = get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm));
	int ret = 0;

	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
		ctr_add_iv(iv, req->info, (reqctx->processed /
			   AES_BLOCK_SIZE));
	else if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_RFC3686)
		*(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE +
			CTR_RFC3686_IV_SIZE) = cpu_to_be32((reqctx->processed /
						AES_BLOCK_SIZE) + 1);
	else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS)
		ret = chcr_update_tweak(req, iv);
	else if (subtype == CRYPTO_ALG_SUB_TYPE_CBC) {
		if (reqctx->op)
			sg_pcopy_to_buffer(req->src, sg_nents(req->src), iv,
					   16,
					   reqctx->processed - AES_BLOCK_SIZE);
		else
			memcpy(iv, &fw6_pld->data[2], AES_BLOCK_SIZE);
	}

	return ret;
}

/* We need a separate function for the final IV because in RFC3686 the
 * initial counter starts from 1 and the IV buffer is only 8 bytes, which
 * remains constant for subsequent update requests.
 */
static int chcr_final_cipher_iv(struct ablkcipher_request *req,
				struct cpl_fw6_pld *fw6_pld, u8 *iv)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	int subtype = get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm));
	int ret = 0;

	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
		ctr_add_iv(iv, req->info, (reqctx->processed /
			   AES_BLOCK_SIZE));
	else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS)
		ret = chcr_update_tweak(req, iv);
	else if (subtype == CRYPTO_ALG_SUB_TYPE_CBC) {
		if (reqctx->op)
			sg_pcopy_to_buffer(req->src, sg_nents(req->src), iv,
					   16,
					   reqctx->processed - AES_BLOCK_SIZE);
		else
			memcpy(iv, &fw6_pld->data[2], AES_BLOCK_SIZE);
	}
	return ret;
}

static int chcr_handle_cipher_resp(struct ablkcipher_request *req,
				   unsigned char *input, int err)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	struct sk_buff *skb;
	struct cpl_fw6_pld *fw6_pld = (struct cpl_fw6_pld *)input;
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	struct cipher_wr_param wrparam;
	int bytes;

	dma_unmap_sg(&u_ctx->lldi.pdev->dev, reqctx->dst, reqctx->dst_nents,
		     DMA_FROM_DEVICE);
	if (reqctx->skb) {
		kfree_skb(reqctx->skb);
		reqctx->skb = NULL;
	}
	if (err)
		goto complete;

	if (req->nbytes == reqctx->processed) {
		err = chcr_final_cipher_iv(req, fw6_pld, req->info);
		goto complete;
	}

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    ctx->tx_qidx))) {
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -EBUSY;
			goto complete;
		}
	}
	wrparam.srcsg = scatterwalk_ffwd(reqctx->srcffwd, req->src,
					 reqctx->processed);
	reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd, reqctx->dstsg,
				       reqctx->processed);
	if (!wrparam.srcsg || !reqctx->dst) {
		pr_err("Input sg list length less than nbytes\n");
		err = -EINVAL;
		goto complete;
	}
	bytes = chcr_sg_ent_in_wr(wrparam.srcsg, reqctx->dst, 1,
				  SPACE_LEFT(ablkctx->enckey_len),
				  &wrparam.snent, &reqctx->dst_nents);
	if ((bytes + reqctx->processed) >= req->nbytes)
		bytes = req->nbytes - reqctx->processed;
	else
		bytes = ROUND_16(bytes);
	err = chcr_update_cipher_iv(req, fw6_pld, reqctx->iv);
	if (err)
		goto complete;

	if (unlikely(bytes == 0)) {
		err = chcr_cipher_fallback(ablkctx->sw_cipher,
					   req->base.flags,
					   wrparam.srcsg,
					   reqctx->dst,
					   req->nbytes - reqctx->processed,
					   reqctx->iv,
					   reqctx->op);
		goto complete;
	}

	if (get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm)) ==
	    CRYPTO_ALG_SUB_TYPE_CTR)
		bytes = adjust_ctr_overflow(reqctx->iv, bytes);
	reqctx->processed += bytes;
	wrparam.qid = u_ctx->lldi.rxq_ids[ctx->rx_qidx];
	wrparam.req = req;
	wrparam.bytes = bytes;
	skb = create_cipher_wr(&wrparam);
	if (IS_ERR(skb)) {
		pr_err("chcr : %s : Failed to form WR. No memory\n", __func__);
		err = PTR_ERR(skb);
		goto complete;
	}
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
	chcr_send_wr(skb);
	return 0;
complete:
	free_new_sg(reqctx->newdstsg);
	reqctx->newdstsg = NULL;
	req->base.complete(&req->base, err);
	return err;
}

static int process_cipher(struct ablkcipher_request *req,
			  unsigned short qid,
			  struct sk_buff **skb,
			  unsigned short op_type)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	unsigned int ivsize = crypto_ablkcipher_ivsize(tfm);
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	struct cipher_wr_param wrparam;
	int bytes, nents, err = -EINVAL;

	reqctx->newdstsg = NULL;
	reqctx->processed = 0;
	if (!req->info)
		goto error;
	if ((ablkctx->enckey_len == 0) || (ivsize > AES_BLOCK_SIZE) ||
	    (req->nbytes == 0) ||
	    (req->nbytes % crypto_ablkcipher_blocksize(tfm))) {
		pr_err("AES: Invalid value of Key Len %d nbytes %d IV Len %d\n",
		       ablkctx->enckey_len, req->nbytes, ivsize);
		goto error;
	}
	wrparam.srcsg = req->src;
	if (is_newsg(req->dst, &nents)) {
		reqctx->newdstsg = alloc_new_sg(req->dst, nents);
		if (IS_ERR(reqctx->newdstsg))
			return PTR_ERR(reqctx->newdstsg);
		reqctx->dstsg = reqctx->newdstsg;
	} else {
		reqctx->dstsg = req->dst;
	}
	bytes = chcr_sg_ent_in_wr(wrparam.srcsg, reqctx->dstsg, MIN_CIPHER_SG,
				  SPACE_LEFT(ablkctx->enckey_len),
				  &wrparam.snent,
				  &reqctx->dst_nents);
	if ((bytes + reqctx->processed) >= req->nbytes)
		bytes = req->nbytes - reqctx->processed;
	else
		bytes = ROUND_16(bytes);
	if (unlikely(bytes > req->nbytes))
		bytes = req->nbytes;
	if (get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm)) ==
	    CRYPTO_ALG_SUB_TYPE_CTR) {
		bytes = adjust_ctr_overflow(req->info, bytes);
	}
	if (get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm)) ==
	    CRYPTO_ALG_SUB_TYPE_CTR_RFC3686) {
		memcpy(reqctx->iv, ablkctx->nonce, CTR_RFC3686_NONCE_SIZE);
		memcpy(reqctx->iv + CTR_RFC3686_NONCE_SIZE, req->info,
		       CTR_RFC3686_IV_SIZE);

		/* initialize counter portion of counter block */
		*(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE +
			CTR_RFC3686_IV_SIZE) = cpu_to_be32(1);
	} else {
		memcpy(reqctx->iv, req->info, ivsize);
	}
	if (unlikely(bytes == 0)) {
		err = chcr_cipher_fallback(ablkctx->sw_cipher,
					   req->base.flags,
					   req->src,
					   req->dst,
					   req->nbytes,
					   req->info,
					   op_type);
		goto error;
	}
	reqctx->processed = bytes;
	reqctx->dst = reqctx->dstsg;
	reqctx->op = op_type;
	wrparam.qid = qid;
	wrparam.req = req;
	wrparam.bytes = bytes;
	*skb = create_cipher_wr(&wrparam);
	if (IS_ERR(*skb)) {
		err = PTR_ERR(*skb);
		goto error;
	}

	return 0;
error:
	free_new_sg(reqctx->newdstsg);
	reqctx->newdstsg = NULL;
	return err;
}

static int chcr_aes_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
	struct sk_buff *skb = NULL;
	int err;
	struct uld_ctx *u_ctx = ULD_CTX(ctx);

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    ctx->tx_qidx))) {
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -EBUSY;
	}

	err = process_cipher(req, u_ctx->lldi.rxq_ids[ctx->rx_qidx], &skb,
			     CHCR_ENCRYPT_OP);
	if (err || !skb)
		return err;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}

static int chcr_aes_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct chcr_context *ctx = crypto_ablkcipher_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct sk_buff *skb = NULL;
	int err;

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    ctx->tx_qidx))) {
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -EBUSY;
	}

	err = process_cipher(req, u_ctx->lldi.rxq_ids[ctx->rx_qidx], &skb,
			     CHCR_DECRYPT_OP);
	if (err || !skb)
		return err;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}

static int chcr_device_init(struct chcr_context *ctx)
{
	struct uld_ctx *u_ctx = NULL;
	struct adapter *adap;
	unsigned int id;
	int txq_perchan, txq_idx, ntxq;
	int err = 0, rxq_perchan, rxq_idx;

	id = smp_processor_id();
	if (!ctx->dev) {
		u_ctx = assign_chcr_device();
		if (!u_ctx) {
			pr_err("chcr device assignment fails\n");
			goto out;
		}
		ctx->dev = u_ctx->dev;
		adap = padap(ctx->dev);
		ntxq = min_not_zero((unsigned int)u_ctx->lldi.nrxq,
				    adap->vres.ncrypto_fc);
		rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
		txq_perchan = ntxq / u_ctx->lldi.nchan;
		spin_lock(&ctx->dev->lock_chcr_dev);
		ctx->tx_chan_id = ctx->dev->tx_channel_id;
		ctx->dev->tx_channel_id = !ctx->dev->tx_channel_id;
		ctx->dev->rx_channel_id = 0;
		spin_unlock(&ctx->dev->lock_chcr_dev);
		rxq_idx = ctx->tx_chan_id * rxq_perchan;
		rxq_idx += id % rxq_perchan;
		txq_idx = ctx->tx_chan_id * txq_perchan;
		txq_idx += id % txq_perchan;
		ctx->rx_qidx = rxq_idx;
		ctx->tx_qidx = txq_idx;
		/* Channel Id used by SGE to forward packet to Host.
		 * Same value should be used in cpl_fw6_pld RSS_CH field
		 * by FW. Driver programs PCI channel ID to be used in fw
		 * at the time of queue allocation with value "pi->tx_chan"
		 */
		ctx->pci_chan_id = txq_idx / txq_perchan;
	}
out:
	return err;
}

static int chcr_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);

	ablkctx->sw_cipher = crypto_alloc_skcipher(alg->cra_name, 0,
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ablkctx->sw_cipher)) {
		pr_err("failed to allocate fallback for %s\n", alg->cra_name);
		return PTR_ERR(ablkctx->sw_cipher);
	}

	if (get_cryptoalg_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_XTS) {
		/* To update tweak */
		ablkctx->aes_generic = crypto_alloc_cipher("aes-generic", 0, 0);
		if (IS_ERR(ablkctx->aes_generic)) {
			pr_err("failed to allocate aes cipher for tweak\n");
			return PTR_ERR(ablkctx->aes_generic);
		}
	} else
		ablkctx->aes_generic = NULL;

	tfm->crt_ablkcipher.reqsize = sizeof(struct chcr_blkcipher_req_ctx);
	return chcr_device_init(crypto_tfm_ctx(tfm));
}

static int chcr_rfc3686_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);

	/* RFC3686 initialises the IV counter value to 1, so rfc3686(ctr(aes))
	 * cannot be used as a fallback in chcr_handle_cipher_response.
	 */
	ablkctx->sw_cipher = crypto_alloc_skcipher("ctr(aes)", 0,
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ablkctx->sw_cipher)) {
		pr_err("failed to allocate fallback for %s\n", alg->cra_name);
		return PTR_ERR(ablkctx->sw_cipher);
	}
	tfm->crt_ablkcipher.reqsize = sizeof(struct chcr_blkcipher_req_ctx);
	return chcr_device_init(crypto_tfm_ctx(tfm));
}

static void chcr_cra_exit(struct crypto_tfm *tfm)
{
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);

	crypto_free_skcipher(ablkctx->sw_cipher);
	if (ablkctx->aes_generic)
		crypto_free_cipher(ablkctx->aes_generic);
}

static int get_alg_config(struct algo_param *params,
			  unsigned int auth_size)
{
	switch (auth_size) {
	case SHA1_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_160;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA1;
		params->result_size = SHA1_DIGEST_SIZE;
		break;
	case SHA224_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA224;
		params->result_size = SHA256_DIGEST_SIZE;
		break;
	case SHA256_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA256;
		params->result_size = SHA256_DIGEST_SIZE;
		break;
	case SHA384_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_384;
		params->result_size = SHA512_DIGEST_SIZE;
		break;
	case SHA512_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_512;
		params->result_size = SHA512_DIGEST_SIZE;
		break;
	default:
		pr_err("chcr : ERROR, unsupported digest size\n");
		return -EINVAL;
	}
	return 0;
}

static inline void chcr_free_shash(struct crypto_shash *base_hash)
{
	crypto_free_shash(base_hash);
}

/**
 *	create_hash_wr - Create hash work request
 *	@req - Cipher req base
 */
static struct sk_buff *create_hash_wr(struct ahash_request *req,
				      struct hash_wr_param *param)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	unsigned int frags = 0, transhdr_len, iopad_alignment = 0;
	unsigned int digestsize = crypto_ahash_digestsize(tfm);
	unsigned int kctx_len = 0;
	u8 hash_size_in_response = 0;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;
	struct adapter *adap = padap(ctx->dev);

	iopad_alignment = KEYCTX_ALIGN_PAD(digestsize);
	kctx_len = param->alg_prm.result_size + iopad_alignment;
	if (param->opad_needed)
		kctx_len += param->alg_prm.result_size + iopad_alignment;

	if (req_ctx->result)
		hash_size_in_response = digestsize;
	else
		hash_size_in_response = param->alg_prm.result_size;
	transhdr_len = HASH_TRANSHDR_SIZE(kctx_len);
	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
	if (!skb)
		return ERR_PTR(-ENOMEM);

	skb_reserve(skb, sizeof(struct sge_opaque_hdr));
	chcr_req = __skb_put_zero(skb, transhdr_len);

	chcr_req->sec_cpl.op_ivinsrtofst =
		FILL_SEC_CPL_OP_IVINSR(ctx->dev->rx_channel_id, 2, 0);
	chcr_req->sec_cpl.pldlen = htonl(param->bfr_len + param->sg_len);

	chcr_req->sec_cpl.aadstart_cipherstop_hi =
		FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, 0, 0);
	chcr_req->sec_cpl.cipherstop_lo_authinsert =
		FILL_SEC_CPL_AUTHINSERT(0, 1, 0, 0);
	chcr_req->sec_cpl.seqno_numivs =
		FILL_SEC_CPL_SCMD0_SEQNO(0, 0, 0, param->alg_prm.auth_mode,
					 param->opad_needed, 0);

	chcr_req->sec_cpl.ivgen_hdrlen =
		FILL_SEC_CPL_IVGEN_HDRLEN(param->last, param->more, 0, 1, 0, 0);

	memcpy(chcr_req->key_ctx.key, req_ctx->partial_hash,
	       param->alg_prm.result_size);

	if (param->opad_needed)
		memcpy(chcr_req->key_ctx.key +
		       ((param->alg_prm.result_size <= 32) ? 32 :
			CHCR_HASH_MAX_DIGEST_SIZE),
		       hmacctx->opad, param->alg_prm.result_size);

	chcr_req->key_ctx.ctx_hdr = FILL_KEY_CTX_HDR(CHCR_KEYCTX_NO_KEY,
					    param->alg_prm.mk_size, 0,
					    param->opad_needed,
					    ((kctx_len +
					     sizeof(chcr_req->key_ctx)) >> 4));
	chcr_req->sec_cpl.scmd1 = cpu_to_be64((u64)param->scmd1);

	skb_set_transport_header(skb, transhdr_len);
	if (param->bfr_len != 0)
		write_buffer_to_skb(skb, &frags, req_ctx->reqbfr,
				    param->bfr_len);
	if (param->sg_len != 0)
		write_sg_to_skb(skb, &frags, req->src, param->sg_len);
	atomic_inc(&adap->chcr_stats.digest_rqst);
	create_wreq(ctx, chcr_req, &req->base, skb, kctx_len,
		    hash_size_in_response, 0, DUMMY_BYTES, 0);
	req_ctx->skb = skb;
	skb_get(skb);
	return skb;
}

static int chcr_ahash_update(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
	struct uld_ctx *u_ctx = NULL;
	struct sk_buff *skb;
	u8 remainder = 0, bs;
	unsigned int nbytes = req->nbytes;
	struct hash_wr_param params;

	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

	u_ctx = ULD_CTX(ctx);
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    ctx->tx_qidx))) {
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -EBUSY;
	}

	if (nbytes + req_ctx->reqlen >= bs) {
		remainder = (nbytes + req_ctx->reqlen) % bs;
		nbytes = nbytes + req_ctx->reqlen - remainder;
	} else {
		sg_pcopy_to_buffer(req->src, sg_nents(req->src), req_ctx->reqbfr
				   + req_ctx->reqlen, nbytes, 0);
		req_ctx->reqlen += nbytes;
		return 0;
	}

	params.opad_needed = 0;
	params.more = 1;
	params.last = 0;
	params.sg_len = nbytes - req_ctx->reqlen;
	params.bfr_len = req_ctx->reqlen;
	params.scmd1 = 0;
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	req_ctx->result = 0;
	req_ctx->data_len += params.sg_len + params.bfr_len;
	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	if (remainder) {
		u8 *temp;
		/* Swap buffers */
		temp = req_ctx->reqbfr;
		req_ctx->reqbfr = req_ctx->skbfr;
		req_ctx->skbfr = temp;
		sg_pcopy_to_buffer(req->src, sg_nents(req->src),
				   req_ctx->reqbfr, remainder, req->nbytes -
				   remainder);
	}
	req_ctx->reqlen = remainder;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
	chcr_send_wr(skb);

	return -EINPROGRESS;
}

static void create_last_hash_block(char *bfr_ptr, unsigned int bs, u64 scmd1)
{
	memset(bfr_ptr, 0, bs);
	*bfr_ptr = 0x80;
	if (bs == 64)
		*(__be64 *)(bfr_ptr + 56) = cpu_to_be64(scmd1 << 3);
	else
		*(__be64 *)(bfr_ptr + 120) = cpu_to_be64(scmd1 << 3);
}

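/*
 * Worked example (added for clarity, not part of the original driver
 * source): for a 64-byte block (SHA-1/224/256) this builds the final
 * padding block: byte 0 = 0x80, bytes 1..55 = 0, and the message length
 * in *bits* (scmd1 << 3) stored big-endian at offset 56. SHA-384/512 use
 * a 128-byte block with the length at offset 120.
 */
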
static int chcr_ahash_final(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
	struct hash_wr_param params;
	struct sk_buff *skb;
	struct uld_ctx *u_ctx = NULL;
	u8 bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

	u_ctx = ULD_CTX(ctx);
	if (is_hmac(crypto_ahash_tfm(rtfm)))
		params.opad_needed = 1;
	else
		params.opad_needed = 0;
	params.sg_len = 0;
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	req_ctx->result = 1;
	params.bfr_len = req_ctx->reqlen;
	req_ctx->data_len += params.bfr_len + params.sg_len;
	if (req_ctx->reqlen == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
		params.last = 0;
		params.more = 1;
		params.scmd1 = 0;
		params.bfr_len = bs;
	} else {
		params.scmd1 = req_ctx->data_len;
		params.last = 1;
		params.more = 0;
	}
	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}

static int chcr_ahash_finup(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
	struct uld_ctx *u_ctx = NULL;
	struct sk_buff *skb;
	struct hash_wr_param params;
	u8 bs;

	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
	u_ctx = ULD_CTX(ctx);

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    ctx->tx_qidx))) {
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -EBUSY;
	}

	if (is_hmac(crypto_ahash_tfm(rtfm)))
		params.opad_needed = 1;
	else
		params.opad_needed = 0;

	params.sg_len = req->nbytes;
	params.bfr_len = req_ctx->reqlen;
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	req_ctx->data_len += params.bfr_len + params.sg_len;
	req_ctx->result = 1;
	if ((req_ctx->reqlen + req->nbytes) == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
		params.last = 0;
		params.more = 1;
		params.scmd1 = 0;
		params.bfr_len = bs;
	} else {
		params.scmd1 = req_ctx->data_len;
		params.last = 1;
		params.more = 0;
	}

	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
	chcr_send_wr(skb);

	return -EINPROGRESS;
}

static int chcr_ahash_digest(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
	struct uld_ctx *u_ctx = NULL;
	struct sk_buff *skb;
	struct hash_wr_param params;
	u8 bs;

	rtfm->init(req);
	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

	u_ctx = ULD_CTX(ctx);
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    ctx->tx_qidx))) {
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -EBUSY;
	}

	if (is_hmac(crypto_ahash_tfm(rtfm)))
		params.opad_needed = 1;
	else
		params.opad_needed = 0;

	params.last = 0;
	params.more = 0;
	params.sg_len = req->nbytes;
	params.bfr_len = 0;
	params.scmd1 = 0;
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	req_ctx->result = 1;
	req_ctx->data_len += params.bfr_len + params.sg_len;

	if (req->nbytes == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, 0);
		params.more = 1;
		params.bfr_len = bs;
	}

	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb))
		return PTR_ERR(skb);

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}

static int chcr_ahash_export(struct ahash_request *areq, void *out)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct chcr_ahash_req_ctx *state = out;

	state->reqlen = req_ctx->reqlen;
	state->data_len = req_ctx->data_len;
	memcpy(state->bfr1, req_ctx->reqbfr, req_ctx->reqlen);
	memcpy(state->partial_hash, req_ctx->partial_hash,
	       CHCR_HASH_MAX_DIGEST_SIZE);
	return 0;
}

static int chcr_ahash_import(struct ahash_request *areq, const void *in)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct chcr_ahash_req_ctx *state = (struct chcr_ahash_req_ctx *)in;

	req_ctx->reqlen = state->reqlen;
	req_ctx->data_len = state->data_len;
	req_ctx->reqbfr = req_ctx->bfr1;
	req_ctx->skbfr = req_ctx->bfr2;
	memcpy(req_ctx->bfr1, state->bfr1, CHCR_HASH_MAX_BLOCK_SIZE_128);
	memcpy(req_ctx->partial_hash, state->partial_hash,
	       CHCR_HASH_MAX_DIGEST_SIZE);
	return 0;
}

static int chcr_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
	unsigned int digestsize = crypto_ahash_digestsize(tfm);
	unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	unsigned int i, err = 0, updated_digestsize;

	SHASH_DESC_ON_STACK(shash, hmacctx->base_hash);

	/* Use the key to calculate the ipad and opad. The ipad will be sent
	 * with the first request's data, the opad with the final hash
	 * result; the ipad lives in hmacctx->ipad and the opad in
	 * hmacctx->opad.
	 */
	shash->tfm = hmacctx->base_hash;
	shash->flags = crypto_shash_get_flags(hmacctx->base_hash);
	if (keylen > bs) {
		err = crypto_shash_digest(shash, key, keylen,
					  hmacctx->ipad);
		if (err)
			goto out;
		keylen = digestsize;
	} else {
		memcpy(hmacctx->ipad, key, keylen);
	}
	memset(hmacctx->ipad + keylen, 0, bs - keylen);
	memcpy(hmacctx->opad, hmacctx->ipad, bs);

	for (i = 0; i < bs / sizeof(int); i++) {
		*((unsigned int *)(&hmacctx->ipad) + i) ^= IPAD_DATA;
		*((unsigned int *)(&hmacctx->opad) + i) ^= OPAD_DATA;
	}

	updated_digestsize = digestsize;
	if (digestsize == SHA224_DIGEST_SIZE)
		updated_digestsize = SHA256_DIGEST_SIZE;
	else if (digestsize == SHA384_DIGEST_SIZE)
		updated_digestsize = SHA512_DIGEST_SIZE;
	err = chcr_compute_partial_hash(shash, hmacctx->ipad,
					hmacctx->ipad, digestsize);
	if (err)
		goto out;
	chcr_change_order(hmacctx->ipad, updated_digestsize);

	err = chcr_compute_partial_hash(shash, hmacctx->opad,
					hmacctx->opad, digestsize);
	if (err)
		goto out;
	chcr_change_order(hmacctx->opad, updated_digestsize);
out:
	return err;
}

static int chcr_aes_xts_setkey(struct crypto_ablkcipher *cipher, const u8 *key,
			       unsigned int key_len)
{
	struct chcr_context *ctx = crypto_ablkcipher_ctx(cipher);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	unsigned short context_size = 0;
	int err;

	err = chcr_cipher_fallback_setkey(cipher, key, key_len);
	if (err)
		goto badkey_err;

	memcpy(ablkctx->key, key, key_len);
	ablkctx->enckey_len = key_len;
	get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, key_len << 2);
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD + key_len) >> 4;
	ablkctx->key_ctx_hdr =
		FILL_KEY_CTX_HDR((key_len == AES_KEYSIZE_256) ?
				 CHCR_KEYCTX_CIPHER_KEY_SIZE_128 :
				 CHCR_KEYCTX_CIPHER_KEY_SIZE_256,
				 CHCR_KEYCTX_NO_KEY, 1,
				 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_XTS;
	return 0;
badkey_err:
	crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	ablkctx->enckey_len = 0;

	return err;
}

static int chcr_sha_init(struct ahash_request *areq)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	int digestsize = crypto_ahash_digestsize(tfm);

	req_ctx->data_len = 0;
	req_ctx->reqlen = 0;
	req_ctx->reqbfr = req_ctx->bfr1;
	req_ctx->skbfr = req_ctx->bfr2;
	req_ctx->skb = NULL;
	req_ctx->result = 0;
	copy_hash_init_values(req_ctx->partial_hash, digestsize);
	return 0;
}

static int chcr_sha_cra_init(struct crypto_tfm *tfm)
{
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct chcr_ahash_req_ctx));
	return chcr_device_init(crypto_tfm_ctx(tfm));
}

static int chcr_hmac_init(struct ahash_request *areq)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(areq);
	struct chcr_context *ctx = crypto_tfm_ctx(crypto_ahash_tfm(rtfm));
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
	unsigned int digestsize = crypto_ahash_digestsize(rtfm);
	unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

	chcr_sha_init(areq);
	req_ctx->data_len = bs;
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		if (digestsize == SHA224_DIGEST_SIZE)
			memcpy(req_ctx->partial_hash, hmacctx->ipad,
			       SHA256_DIGEST_SIZE);
		else if (digestsize == SHA384_DIGEST_SIZE)
			memcpy(req_ctx->partial_hash, hmacctx->ipad,
			       SHA512_DIGEST_SIZE);
		else
			memcpy(req_ctx->partial_hash, hmacctx->ipad,
			       digestsize);
	}
	return 0;
}

static int chcr_hmac_cra_init(struct crypto_tfm *tfm)
{
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
	unsigned int digestsize =
		crypto_ahash_digestsize(__crypto_ahash_cast(tfm));

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct chcr_ahash_req_ctx));
	hmacctx->base_hash = chcr_alloc_shash(digestsize);
	if (IS_ERR(hmacctx->base_hash))
		return PTR_ERR(hmacctx->base_hash);
	return chcr_device_init(crypto_tfm_ctx(tfm));
}

static void chcr_hmac_cra_exit(struct crypto_tfm *tfm)
{
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);

	if (hmacctx->base_hash) {
		chcr_free_shash(hmacctx->base_hash);
		hmacctx->base_hash = NULL;
	}
}

static int is_newsg(struct scatterlist *sgl, unsigned int *newents)
{
	int nents = 0;
	int ret = 0;

	while (sgl) {
		if (sgl->length > CHCR_SG_SIZE)
			ret = 1;
		nents += DIV_ROUND_UP(sgl->length, CHCR_SG_SIZE);
		sgl = sg_next(sgl);
	}
	*newents = nents;
	return ret;
}

static inline void free_new_sg(struct scatterlist *sgl)
{
	kfree(sgl);
}

static struct scatterlist *alloc_new_sg(struct scatterlist *sgl,
					unsigned int nents)
{
	struct scatterlist *newsg, *sg;
	int i, len, processed = 0;
	struct page *spage;
	int offset;

	newsg = kmalloc_array(nents, sizeof(struct scatterlist), GFP_KERNEL);
	if (!newsg)
		return ERR_PTR(-ENOMEM);
	sg = newsg;
	sg_init_table(sg, nents);
	offset = sgl->offset;
	spage = sg_page(sgl);
	for (i = 0; i < nents; i++) {
		len = min_t(u32, sgl->length - processed, CHCR_SG_SIZE);
		sg_set_page(sg, spage, len, offset);
		processed += len;
		offset += len;
		if (offset >= PAGE_SIZE) {
			offset = offset % PAGE_SIZE;
			spage++;
		}
		if (processed == sgl->length) {
			processed = 0;
			sgl = sg_next(sgl);
			if (!sgl)
				break;
			spage = sg_page(sgl);
			offset = sgl->offset;
		}
		sg = sg_next(sg);
	}
	return newsg;
}

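/*
 * Note (added for clarity, not part of the original driver source):
 * is_newsg() and alloc_new_sg() together split any scatterlist entry
 * larger than CHCR_SG_SIZE into CHCR_SG_SIZE-sized pieces. For example,
 * assuming CHCR_SG_SIZE is 2048, a single 5 KB entry becomes
 * DIV_ROUND_UP(5120, 2048) = 3 entries, so each DSGL entry stays within
 * what the hardware accepts.
 */
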
static int chcr_copy_assoc(struct aead_request *req,
			   struct chcr_aead_ctx *ctx)
{
	SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);

	skcipher_request_set_tfm(skreq, ctx->null);
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      NULL, NULL);
	skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen,
				   NULL);

	return crypto_skcipher_encrypt(skreq);
}

static int chcr_aead_need_fallback(struct aead_request *req, int src_nent,
				   int aadmax, int wrlen,
				   unsigned short op_type)
{
	unsigned int authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (((req->cryptlen - (op_type ? authsize : 0)) == 0) ||
	    (req->assoclen > aadmax) ||
	    (src_nent > MAX_SKB_FRAGS) ||
	    (wrlen > MAX_WR_SIZE))
		return 1;
	return 0;
}

static int chcr_aead_fallback(struct aead_request *req, unsigned short op_type)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, aeadctx->sw_cipher);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return op_type ? crypto_aead_decrypt(subreq) :
		crypto_aead_encrypt(subreq);
}

static struct sk_buff *create_authenc_wr(struct aead_request *req,
					 unsigned short qid,
					 int size,
					 unsigned short op_type)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct phys_sge_parm sg_param;
	struct scatterlist *src;
	unsigned int frags = 0, transhdr_len;
	unsigned int ivsize = crypto_aead_ivsize(tfm), dst_size = 0;
	unsigned int kctx_len = 0, nents;
	unsigned short stop_offset = 0;
	unsigned int assoclen = req->assoclen;
	unsigned int authsize = crypto_aead_authsize(tfm);
	int error = -EINVAL, src_nent;
	int null = 0;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;
	struct adapter *adap = padap(ctx->dev);

	reqctx->newdstsg = NULL;
	dst_size = req->assoclen + req->cryptlen + (op_type ? -authsize :
						    authsize);
	if (aeadctx->enckey_len == 0 || (req->cryptlen <= 0))
		goto err;

	if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
		goto err;
	src_nent = sg_nents_for_len(req->src, req->assoclen + req->cryptlen);
	if (src_nent < 0)
		goto err;
	src = scatterwalk_ffwd(reqctx->srcffwd, req->src, req->assoclen);

	if (req->src != req->dst) {
		error = chcr_copy_assoc(req, aeadctx);
		if (error)
			return ERR_PTR(error);
	}
	if (dst_size && is_newsg(req->dst, &nents)) {
		reqctx->newdstsg = alloc_new_sg(req->dst, nents);
		if (IS_ERR(reqctx->newdstsg))
			return ERR_CAST(reqctx->newdstsg);
		reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd,
					       reqctx->newdstsg, req->assoclen);
	} else {
		if (req->src == req->dst)
			reqctx->dst = src;
		else
			reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd,
						       req->dst, req->assoclen);
	}
	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_NULL) {
		null = 1;
		assoclen = 0;
	}
	reqctx->dst_nents = sg_nents_for_len(reqctx->dst, req->cryptlen +
					     (op_type ? -authsize : authsize));
	if (reqctx->dst_nents < 0) {
		pr_err("AUTHENC:Invalid Destination sg entries\n");
		error = -EINVAL;
		goto err;
	}
	dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
	kctx_len = (ntohl(KEY_CONTEXT_CTX_LEN_V(aeadctx->key_ctx_hdr)) << 4)
		- sizeof(chcr_req->key_ctx);
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	if (chcr_aead_need_fallback(req, src_nent + MIN_AUTH_SG,
			T6_MAX_AAD_SIZE,
			transhdr_len + (sgl_len(src_nent + MIN_AUTH_SG) * 8),
			op_type)) {
		atomic_inc(&adap->chcr_stats.fallback);
		free_new_sg(reqctx->newdstsg);
		reqctx->newdstsg = NULL;
		return ERR_PTR(chcr_aead_fallback(req, op_type));
	}
	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
	if (!skb) {
		error = -ENOMEM;
		goto err;
	}

	/* LLD is going to write the sge hdr. */
	skb_reserve(skb, sizeof(struct sge_opaque_hdr));

	/* Write WR */
	chcr_req = __skb_put_zero(skb, transhdr_len);

	stop_offset = (op_type == CHCR_ENCRYPT_OP) ? 0 : authsize;

	/*
	 * Input order is AAD,IV and Payload. where IV should be included as
	 * the part of authdata. All other fields should be filled according
	 * to the hardware spec
	 */
	chcr_req->sec_cpl.op_ivinsrtofst =
		FILL_SEC_CPL_OP_IVINSR(ctx->dev->rx_channel_id, 2,
				       (ivsize ? (assoclen + 1) : 0));
	chcr_req->sec_cpl.pldlen = htonl(assoclen + ivsize + req->cryptlen);
	chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
					assoclen ? 1 : 0, assoclen,
					assoclen + ivsize + 1,
					(stop_offset & 0x1F0) >> 4);
	chcr_req->sec_cpl.cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(
					stop_offset & 0xF,
					null ? 0 : assoclen + ivsize + 1,
					stop_offset, stop_offset);
	chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
					(op_type == CHCR_ENCRYPT_OP) ? 1 : 0,
					CHCR_SCMD_CIPHER_MODE_AES_CBC,
					actx->auth_mode, aeadctx->hmac_ctrl,
					ivsize >> 1);
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
					 0, 1, dst_size);

	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
	if (op_type == CHCR_ENCRYPT_OP)
		memcpy(chcr_req->key_ctx.key, aeadctx->key,
		       aeadctx->enckey_len);
	else
		memcpy(chcr_req->key_ctx.key, actx->dec_rrkey,
		       aeadctx->enckey_len);

	memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) <<
					4), actx->h_iopad, kctx_len -
				(DIV_ROUND_UP(aeadctx->enckey_len, 16) << 4));

	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	sg_param.nents = reqctx->dst_nents;
	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
	sg_param.qid = qid;
	error = map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl,
				     reqctx->dst, &sg_param,
				     ctx->pci_chan_id);
	if (error)
		goto dstmap_fail;

	skb_set_transport_header(skb, transhdr_len);

	if (assoclen)
		write_sg_to_skb(skb, &frags, req->src, assoclen);

	write_buffer_to_skb(skb, &frags, req->iv, ivsize);
	write_sg_to_skb(skb, &frags, src, req->cryptlen);
	atomic_inc(&adap->chcr_stats.cipher_rqst);
	create_wreq(ctx, chcr_req, &req->base, skb, kctx_len, size, 1,
		    sizeof(struct cpl_rx_phys_dsgl) + dst_size, 0);
	reqctx->skb = skb;
	skb_get(skb);

	return skb;
dstmap_fail:
	kfree_skb(skb);
err:
	free_new_sg(reqctx->newdstsg);
	reqctx->newdstsg = NULL;
	return ERR_PTR(error);
}

static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (unsigned int)(1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}
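/*
 * Illustrative example (not additional driver behaviour, just what
 * set_msg_len() above does): with msglen = 0x010203 and csize = 4 the
 * bytes written into the length field are 00 01 02 03, i.e. the payload
 * length stored big-endian and right-aligned in the csize-byte field at
 * the end of B0. With csize = 2 the same length does not fit and
 * -EOVERFLOW is returned instead of writing a truncated value.
 */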
static int generate_b0(struct aead_request *req,
		       struct chcr_aead_ctx *aeadctx,
		       unsigned short op_type)
{
	unsigned int l, lp, m;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	u8 *b0 = reqctx->scratch_pad;

	m = crypto_aead_authsize(aead);

	memcpy(b0, reqctx->iv, 16);

	lp = b0[0];
	l = lp + 1;

	/* set m, bits 3-5 */
	*b0 |= (8 * ((m - 2) / 2));

	/* set adata, bit 6, if associated data is used */
	if (req->assoclen)
		*b0 |= 64;

	/* encode the payload length; propagate -EOVERFLOW if it does not
	 * fit in the l-byte length field
	 */
	return set_msg_len(b0 + 16 - l,
			   (op_type == CHCR_DECRYPT_OP) ?
			   req->cryptlen - m : req->cryptlen, l);
}
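/*
 * B0 layout recap (RFC 3610 / NIST SP 800-38C): byte 0 is the flags octet
 * built above as 0 | Adata | ((M - 2) / 2) << 3 | L - 1, where Adata is
 * set when AAD is present, M is the tag length and L is the size of the
 * length field. The middle bytes hold the nonce, and the trailing L bytes
 * hold the message length that set_msg_len() fills in.
 */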
static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}
static int ccm_format_packet(struct aead_request *req,
			     struct chcr_aead_ctx *aeadctx,
			     unsigned int sub_type,
			     unsigned short op_type)
{
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	int rc = 0;

	if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
		reqctx->iv[0] = 3;
		memcpy(reqctx->iv + 1, &aeadctx->salt[0], 3);
		memcpy(reqctx->iv + 4, req->iv, 8);
		memset(reqctx->iv + 12, 0, 4);
		*((unsigned short *)(reqctx->scratch_pad + 16)) =
			htons(req->assoclen - 8);
	} else {
		memcpy(reqctx->iv, req->iv, 16);
		*((unsigned short *)(reqctx->scratch_pad + 16)) =
			htons(req->assoclen);
	}
	rc = generate_b0(req, aeadctx, op_type);
	/* zero the ctr value */
	memset(reqctx->iv + 15 - reqctx->iv[0], 0, reqctx->iv[0] + 1);
	return rc;
}
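/*
 * For the RFC 4309 case above, the 16-byte block is assembled as
 * flags(1) | salt(3) | IV(8) | counter(4), matching the ESP nonce format;
 * iv[0] = 3 encodes L' = 3, i.e. a 4-byte length field. The plain CCM
 * case takes the caller's IV verbatim. In both cases the trailing
 * counter bytes are zeroed so the CTR component starts at zero.
 */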
static void fill_sec_cpl_for_aead(struct cpl_tx_sec_pdu *sec_cpl,
				  unsigned int dst_size,
				  struct aead_request *req,
				  unsigned short op_type,
				  struct chcr_context *chcrctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
	unsigned int ivsize = AES_BLOCK_SIZE;
	unsigned int cipher_mode = CHCR_SCMD_CIPHER_MODE_AES_CCM;
	unsigned int mac_mode = CHCR_SCMD_AUTH_MODE_CBCMAC;
	unsigned int c_id = chcrctx->dev->rx_channel_id;
	unsigned int ccm_xtra;
	unsigned int tag_offset = 0, auth_offset = 0;
	unsigned int assoclen;

	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
		assoclen = req->assoclen - 8;
	else
		assoclen = req->assoclen;
	ccm_xtra = CCM_B0_SIZE +
		((assoclen) ? CCM_AAD_FIELD_SIZE : 0);

	auth_offset = req->cryptlen ?
		(assoclen + ivsize + 1 + ccm_xtra) : 0;
	if (op_type == CHCR_DECRYPT_OP) {
		if (crypto_aead_authsize(tfm) != req->cryptlen)
			tag_offset = crypto_aead_authsize(tfm);
		else
			auth_offset = 0;
	}

	sec_cpl->op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(c_id,
					2, (ivsize ? (assoclen + 1) : 0) +
					ccm_xtra);
	sec_cpl->pldlen =
		htonl(assoclen + ivsize + req->cryptlen + ccm_xtra);
	/* For CCM, B0 is always present, so the AAD always starts at 1. */
	sec_cpl->aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
					1, assoclen + ccm_xtra, assoclen
					+ ivsize + 1 + ccm_xtra, 0);

	sec_cpl->cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(0,
					auth_offset, tag_offset,
					(op_type == CHCR_ENCRYPT_OP) ? 0 :
					crypto_aead_authsize(tfm));
	sec_cpl->seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
					(op_type == CHCR_ENCRYPT_OP) ? 0 : 1,
					cipher_mode, mac_mode,
					aeadctx->hmac_ctrl, ivsize >> 1);

	sec_cpl->ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, 0,
					1, dst_size);
}
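/*
 * Note on the offsets above: because B0 (16 bytes) and, when AAD is
 * present, the 2-byte AAD length field travel inline with the request,
 * every start/stop offset and the payload length are shifted by ccm_xtra
 * relative to the plain AEAD layout used elsewhere in this file.
 */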
int aead_ccm_validate_input(unsigned short op_type,
			    struct aead_request *req,
			    struct chcr_aead_ctx *aeadctx,
			    unsigned int sub_type)
{
	if (sub_type != CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
		if (crypto_ccm_check_iv(req->iv)) {
			pr_err("CCM: IV check fails\n");
			return -EINVAL;
		}
	} else {
		if (req->assoclen != 16 && req->assoclen != 20) {
			pr_err("RFC4309: Invalid AAD length %d\n",
			       req->assoclen);
			return -EINVAL;
		}
	}
	if (aeadctx->enckey_len == 0) {
		pr_err("CCM: Encryption key not set\n");
		return -EINVAL;
	}
	return 0;
}
unsigned int fill_aead_req_fields(struct sk_buff *skb,
				  struct aead_request *req,
				  struct scatterlist *src,
				  unsigned int ivsize,
				  struct chcr_aead_ctx *aeadctx)
{
	unsigned int frags = 0;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);

	/* b0 and aad length (if available) */
	write_buffer_to_skb(skb, &frags, reqctx->scratch_pad, CCM_B0_SIZE +
			    (req->assoclen ? CCM_AAD_FIELD_SIZE : 0));
	if (req->assoclen) {
		if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
			write_sg_to_skb(skb, &frags, req->src,
					req->assoclen - 8);
		else
			write_sg_to_skb(skb, &frags, req->src, req->assoclen);
	}
	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
	write_sg_to_skb(skb, &frags, src, req->cryptlen);

	return frags;
}
static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
					  unsigned short qid,
					  int size,
					  unsigned short op_type)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct phys_sge_parm sg_param;
	struct scatterlist *src;
	unsigned int frags = 0, transhdr_len, ivsize = AES_BLOCK_SIZE;
	unsigned int dst_size = 0, kctx_len, nents;
	unsigned int sub_type;
	unsigned int authsize = crypto_aead_authsize(tfm);
	int error = -EINVAL, src_nent;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;
	struct adapter *adap = padap(ctx->dev);

	dst_size = req->assoclen + req->cryptlen + (op_type ? -authsize :
						    authsize);
	reqctx->newdstsg = NULL;
	if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
		goto err;
	src_nent = sg_nents_for_len(req->src, req->assoclen + req->cryptlen);
	if (src_nent < 0)
		goto err;

	sub_type = get_aead_subtype(tfm);
	src = scatterwalk_ffwd(reqctx->srcffwd, req->src, req->assoclen);
	if (req->src != req->dst) {
		error = chcr_copy_assoc(req, aeadctx);
		if (error) {
			pr_err("AAD copy to destination buffer fails\n");
			return ERR_PTR(error);
		}
	}
	if (dst_size && is_newsg(req->dst, &nents)) {
		reqctx->newdstsg = alloc_new_sg(req->dst, nents);
		if (IS_ERR(reqctx->newdstsg))
			return ERR_CAST(reqctx->newdstsg);
		reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd,
					       reqctx->newdstsg, req->assoclen);
	} else {
		if (req->src == req->dst)
			reqctx->dst = src;
		else
			reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd,
						       req->dst, req->assoclen);
	}
	reqctx->dst_nents = sg_nents_for_len(reqctx->dst, req->cryptlen +
					     (op_type ? -authsize : authsize));
	if (reqctx->dst_nents < 0) {
		pr_err("CCM:Invalid Destination sg entries\n");
		error = -EINVAL;
		goto err;
	}
	error = aead_ccm_validate_input(op_type, req, aeadctx, sub_type);
	if (error)
		goto err;

	dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
	kctx_len = ((DIV_ROUND_UP(aeadctx->enckey_len, 16)) << 4) * 2;
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	if (chcr_aead_need_fallback(req, src_nent + MIN_CCM_SG,
			T6_MAX_AAD_SIZE - 18,
			transhdr_len + (sgl_len(src_nent + MIN_CCM_SG) * 8),
			op_type)) {
		atomic_inc(&adap->chcr_stats.fallback);
		free_new_sg(reqctx->newdstsg);
		reqctx->newdstsg = NULL;
		return ERR_PTR(chcr_aead_fallback(req, op_type));
	}

	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
	if (!skb) {
		error = -ENOMEM;
		goto err;
	}

	skb_reserve(skb, sizeof(struct sge_opaque_hdr));

	chcr_req = __skb_put_zero(skb, transhdr_len);

	fill_sec_cpl_for_aead(&chcr_req->sec_cpl, dst_size, req, op_type, ctx);

	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
	memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
	memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) *
					16), aeadctx->key, aeadctx->enckey_len);

	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	error = ccm_format_packet(req, aeadctx, sub_type, op_type);
	if (error)
		goto dstmap_fail;

	sg_param.nents = reqctx->dst_nents;
	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
	sg_param.qid = qid;
	error = map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl,
				     reqctx->dst, &sg_param, ctx->pci_chan_id);
	if (error)
		goto dstmap_fail;

	skb_set_transport_header(skb, transhdr_len);
	frags = fill_aead_req_fields(skb, req, src, ivsize, aeadctx);
	atomic_inc(&adap->chcr_stats.aead_rqst);
	create_wreq(ctx, chcr_req, &req->base, skb, kctx_len, 0, 1,
		    sizeof(struct cpl_rx_phys_dsgl) + dst_size, 0);
	reqctx->skb = skb;
	skb_get(skb);
	return skb;
dstmap_fail:
	kfree_skb(skb);
err:
	free_new_sg(reqctx->newdstsg);
	reqctx->newdstsg = NULL;
	return ERR_PTR(error);
}
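/*
 * CCM key-context layout: kctx_len above reserves two key-sized slots and
 * the same AES key is copied into both, since CCM uses the cipher key for
 * the CBC-MAC as well; there is no separate ipad/opad or GHASH H entry as
 * in the authenc and GCM paths.
 */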
static struct sk_buff *create_gcm_wr(struct aead_request *req,
				     unsigned short qid,
				     int size,
				     unsigned short op_type)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct phys_sge_parm sg_param;
	struct scatterlist *src;
	unsigned int frags = 0, transhdr_len;
	unsigned int ivsize = AES_BLOCK_SIZE;
	unsigned int dst_size = 0, kctx_len, nents, assoclen = req->assoclen;
	unsigned char tag_offset = 0;
	unsigned int authsize = crypto_aead_authsize(tfm);
	int error = -EINVAL, src_nent;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;
	struct adapter *adap = padap(ctx->dev);

	reqctx->newdstsg = NULL;
	dst_size = assoclen + req->cryptlen + (op_type ? -authsize :
					       authsize);
	/* validate key size */
	if (aeadctx->enckey_len == 0)
		goto err;

	if (op_type && req->cryptlen < crypto_aead_authsize(tfm))
		goto err;
	src_nent = sg_nents_for_len(req->src, assoclen + req->cryptlen);
	if (src_nent < 0)
		goto err;

	src = scatterwalk_ffwd(reqctx->srcffwd, req->src, assoclen);
	if (req->src != req->dst) {
		error = chcr_copy_assoc(req, aeadctx);
		if (error)
			return ERR_PTR(error);
	}

	if (dst_size && is_newsg(req->dst, &nents)) {
		reqctx->newdstsg = alloc_new_sg(req->dst, nents);
		if (IS_ERR(reqctx->newdstsg))
			return ERR_CAST(reqctx->newdstsg);
		reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd,
					       reqctx->newdstsg, assoclen);
	} else {
		if (req->src == req->dst)
			reqctx->dst = src;
		else
			reqctx->dst = scatterwalk_ffwd(reqctx->dstffwd,
						       req->dst, assoclen);
	}

	reqctx->dst_nents = sg_nents_for_len(reqctx->dst, req->cryptlen +
					     (op_type ? -authsize : authsize));
	if (reqctx->dst_nents < 0) {
		pr_err("GCM:Invalid Destination sg entries\n");
		error = -EINVAL;
		goto err;
	}

	dst_size = get_space_for_phys_dsgl(reqctx->dst_nents);
	kctx_len = ((DIV_ROUND_UP(aeadctx->enckey_len, 16)) << 4) +
		AEAD_H_SIZE;
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	if (chcr_aead_need_fallback(req, src_nent + MIN_GCM_SG,
			T6_MAX_AAD_SIZE,
			transhdr_len + (sgl_len(src_nent + MIN_GCM_SG) * 8),
			op_type)) {
		atomic_inc(&adap->chcr_stats.fallback);
		free_new_sg(reqctx->newdstsg);
		reqctx->newdstsg = NULL;
		return ERR_PTR(chcr_aead_fallback(req, op_type));
	}
	skb = alloc_skb((transhdr_len + sizeof(struct sge_opaque_hdr)), flags);
	if (!skb) {
		error = -ENOMEM;
		goto err;
	}

	/* NIC driver is going to write the sge hdr. */
	skb_reserve(skb, sizeof(struct sge_opaque_hdr));

	chcr_req = __skb_put_zero(skb, transhdr_len);

	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106)
		assoclen = req->assoclen - 8;

	tag_offset = (op_type == CHCR_ENCRYPT_OP) ? 0 : authsize;
	chcr_req->sec_cpl.op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(
					ctx->dev->rx_channel_id, 2, (ivsize ?
					(assoclen + 1) : 0));
	chcr_req->sec_cpl.pldlen =
		htonl(assoclen + ivsize + req->cryptlen);
	chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
					assoclen ? 1 : 0, assoclen,
					assoclen + ivsize + 1, 0);
	chcr_req->sec_cpl.cipherstop_lo_authinsert =
		FILL_SEC_CPL_AUTHINSERT(0, assoclen + ivsize + 1,
					tag_offset, tag_offset);
	chcr_req->sec_cpl.seqno_numivs =
		FILL_SEC_CPL_SCMD0_SEQNO(op_type, (op_type ==
					CHCR_ENCRYPT_OP) ? 1 : 0,
					CHCR_SCMD_CIPHER_MODE_AES_GCM,
					CHCR_SCMD_AUTH_MODE_GHASH,
					aeadctx->hmac_ctrl, ivsize >> 1);
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
					0, 1, dst_size);
	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
	memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
	memcpy(chcr_req->key_ctx.key + (DIV_ROUND_UP(aeadctx->enckey_len, 16) *
				16), GCM_CTX(aeadctx)->ghash_h, AEAD_H_SIZE);

	/* prepare a 16 byte iv */
	/* S A L T | IV | 0x00000001 */
	if (get_aead_subtype(tfm) ==
	    CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
		memcpy(reqctx->iv, aeadctx->salt, 4);
		memcpy(reqctx->iv + 4, req->iv, 8);
	} else {
		memcpy(reqctx->iv, req->iv, 12);
	}
	*((unsigned int *)(reqctx->iv + 12)) = htonl(0x01);

	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	sg_param.nents = reqctx->dst_nents;
	sg_param.obsize = req->cryptlen + (op_type ? -authsize : authsize);
	sg_param.qid = qid;
	error = map_writesg_phys_cpl(&u_ctx->lldi.pdev->dev, phys_cpl,
				     reqctx->dst, &sg_param,
				     ctx->pci_chan_id);
	if (error)
		goto dstmap_fail;

	skb_set_transport_header(skb, transhdr_len);
	write_sg_to_skb(skb, &frags, req->src, assoclen);
	write_buffer_to_skb(skb, &frags, reqctx->iv, ivsize);
	write_sg_to_skb(skb, &frags, src, req->cryptlen);
	atomic_inc(&adap->chcr_stats.aead_rqst);
	create_wreq(ctx, chcr_req, &req->base, skb, kctx_len, size, 1,
		    sizeof(struct cpl_rx_phys_dsgl) + dst_size,
		    reqctx->verify);
	reqctx->skb = skb;
	skb_get(skb);
	return skb;

dstmap_fail:
	kfree_skb(skb);
err:
	free_new_sg(reqctx->newdstsg);
	reqctx->newdstsg = NULL;
	return ERR_PTR(error);
}
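/*
 * IV recap: the hardware consumes a full 16-byte counter block. For
 * rfc4106(gcm(aes)) it is salt(4) | explicit IV(8) | 0x00000001; for plain
 * gcm(aes) with a 12-byte IV it is IV | 0x00000001, i.e. the standard GCM
 * pre-counter block J0 for 96-bit IVs.
 */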
static int chcr_aead_cra_init(struct crypto_aead *tfm)
{
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct aead_alg *alg = crypto_aead_alg(tfm);

	aeadctx->sw_cipher = crypto_alloc_aead(alg->base.cra_name, 0,
					       CRYPTO_ALG_NEED_FALLBACK |
					       CRYPTO_ALG_ASYNC);
	if (IS_ERR(aeadctx->sw_cipher))
		return PTR_ERR(aeadctx->sw_cipher);
	crypto_aead_set_reqsize(tfm, max(sizeof(struct chcr_aead_reqctx),
				 sizeof(struct aead_request) +
				 crypto_aead_reqsize(aeadctx->sw_cipher)));
	aeadctx->null = crypto_get_default_null_skcipher();
	if (IS_ERR(aeadctx->null))
		return PTR_ERR(aeadctx->null);
	return chcr_device_init(ctx);
}

static void chcr_aead_cra_exit(struct crypto_aead *tfm)
{
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);

	crypto_put_default_null_skcipher();
	crypto_free_aead(aeadctx->sw_cipher);
}
static int chcr_authenc_null_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NOP;
	aeadctx->mayverify = VERIFY_HW;
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}

static int chcr_authenc_setauthsize(struct crypto_aead *tfm,
				    unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
	u32 maxauth = crypto_aead_maxauthsize(tfm);

	/* SHA-1 authsize in IPsec is 12 instead of 10, i.e. maxauthsize / 2
	 * does not hold for SHA-1; the authsize == 12 check must therefore
	 * come before the authsize == (maxauth >> 1) check.
	 */
	if (authsize == ICV_4) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_6) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_10) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_12) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_14) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == (maxauth >> 1)) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == maxauth) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
	} else {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_SW;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}
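/*
 * The branches above map each requested tag length onto a hardware
 * truncation control, e.g. a 12-byte tag selects the IPsec 96-bit mode
 * and maxauth / 2 selects DIV2. Any length without a matching control
 * falls back to VERIFY_SW: the hardware returns the untruncated digest
 * and the driver compares the tag in software on decryption (see
 * chcr_verify_tag()).
 */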
static int chcr_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	switch (authsize) {
	case ICV_4:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_14:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_13:
	case ICV_15:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_SW;
		break;
	default:
		return -EINVAL;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}
static int chcr_4106_4309_setauthsize(struct crypto_aead *tfm,
				      unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	switch (authsize) {
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	default:
		return -EINVAL;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}
static int chcr_ccm_setauthsize(struct crypto_aead *tfm,
				unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));

	switch (authsize) {
	case ICV_4:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_6:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_10:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_14:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	default:
		return -EINVAL;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}
static int chcr_ccm_common_setkey(struct crypto_aead *aead,
				  const u8 *key,
				  unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	unsigned char ck_size, mk_size;
	int key_ctx_size = 0;

	key_ctx_size = sizeof(struct _key_ctx) +
		((DIV_ROUND_UP(keylen, 16)) << 4) * 2;
	if (keylen == AES_KEYSIZE_128) {
		mk_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_192;
	} else if (keylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
	} else {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		aeadctx->enckey_len = 0;
		return -EINVAL;
	}
	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, mk_size, 0, 0,
						key_ctx_size >> 4);
	memcpy(aeadctx->key, key, keylen);
	aeadctx->enckey_len = keylen;

	return 0;
}
static int chcr_aead_ccm_setkey(struct crypto_aead *aead,
				const u8 *key,
				unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	int error;

	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead) &
			      CRYPTO_TFM_REQ_MASK);
	error = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	crypto_aead_clear_flags(aead, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(aead, crypto_aead_get_flags(aeadctx->sw_cipher) &
			      CRYPTO_TFM_RES_MASK);
	if (error)
		return error;
	return chcr_ccm_common_setkey(aead, key, keylen);
}
static int chcr_aead_rfc4309_setkey(struct crypto_aead *aead, const u8 *key,
				    unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	int error;

	if (keylen < 3) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		aeadctx->enckey_len = 0;
		return -EINVAL;
	}
	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead) &
			      CRYPTO_TFM_REQ_MASK);
	error = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	crypto_aead_clear_flags(aead, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(aead, crypto_aead_get_flags(aeadctx->sw_cipher) &
			      CRYPTO_TFM_RES_MASK);
	if (error)
		return error;
	keylen -= 3;
	memcpy(aeadctx->salt, key + keylen, 3);
	return chcr_ccm_common_setkey(aead, key, keylen);
}
static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			   unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(aead);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);
	struct crypto_cipher *cipher;
	unsigned int ck_size;
	int ret = 0, key_ctx_size = 0;

	aeadctx->enckey_len = 0;
	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead)
			      & CRYPTO_TFM_REQ_MASK);
	ret = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	crypto_aead_clear_flags(aead, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(aead, crypto_aead_get_flags(aeadctx->sw_cipher) &
			      CRYPTO_TFM_RES_MASK);
	if (ret)
		goto out;

	if (get_aead_subtype(aead) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106 &&
	    keylen > 3) {
		keylen -= 4;  /* nonce/salt is present in the last 4 bytes */
		memcpy(aeadctx->salt, key + keylen, 4);
	}
	if (keylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		pr_err("GCM: Invalid key length %d\n", keylen);
		ret = -EINVAL;
		goto out;
	}

	memcpy(aeadctx->key, key, keylen);
	aeadctx->enckey_len = keylen;
	key_ctx_size = sizeof(struct _key_ctx) +
		((DIV_ROUND_UP(keylen, 16)) << 4) +
		AEAD_H_SIZE;
	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size,
						CHCR_KEYCTX_MAC_KEY_SIZE_128,
						0, 0,
						key_ctx_size >> 4);
	/* Calculate the H = CIPH(K, 0 repeated 16 times).
	 * It will go in key context
	 */
	cipher = crypto_alloc_cipher("aes-generic", 0, 0);
	if (IS_ERR(cipher)) {
		aeadctx->enckey_len = 0;
		ret = -ENOMEM;
		goto out;
	}

	ret = crypto_cipher_setkey(cipher, key, keylen);
	if (ret) {
		aeadctx->enckey_len = 0;
		goto out1;
	}
	memset(gctx->ghash_h, 0, AEAD_H_SIZE);
	crypto_cipher_encrypt_one(cipher, gctx->ghash_h, gctx->ghash_h);

out1:
	crypto_free_cipher(cipher);
out:
	return ret;
}
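/*
 * The GHASH hash subkey computed above is the standard H = AES-K(0^128):
 * ghash_h is zeroed and then encrypted in place with a one-off software
 * AES instance ("aes-generic"), and create_gcm_wr() later copies it into
 * the key context right after the (rounded-up) cipher key.
 */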
static int chcr_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
			       unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(authenc);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	/* it contains auth and cipher key both*/
	struct crypto_authenc_keys keys;
	unsigned int bs;
	unsigned int max_authsize = crypto_aead_alg(authenc)->maxauthsize;
	int err = 0, i, key_ctx_len = 0;
	unsigned char ck_size = 0;
	unsigned char pad[CHCR_HASH_MAX_BLOCK_SIZE_128] = { 0 };
	struct crypto_shash *base_hash = ERR_PTR(-EINVAL);
	struct algo_param param;
	int align;
	u8 *o_ptr = NULL;

	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(authenc)
			      & CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	crypto_aead_clear_flags(authenc, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(authenc, crypto_aead_get_flags(aeadctx->sw_cipher)
			      & CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
		crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
		goto out;
	}

	if (get_alg_config(&param, max_authsize)) {
		pr_err("chcr : Unsupported digest size\n");
		goto out;
	}
	if (keys.enckeylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keys.enckeylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keys.enckeylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		pr_err("chcr : Unsupported cipher key\n");
		goto out;
	}

	/* Copy only the encryption key. The auth key is used here to
	 * generate h(ipad) and h(opad), so it is not needed again;
	 * authkeylen equals the hash digest size.
	 */
	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
	aeadctx->enckey_len = keys.enckeylen;
	get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
			    aeadctx->enckey_len << 3);

	base_hash = chcr_alloc_shash(max_authsize);
	if (IS_ERR(base_hash)) {
		pr_err("chcr : Base driver cannot be loaded\n");
		aeadctx->enckey_len = 0;
		return -EINVAL;
	}
	{
		SHASH_DESC_ON_STACK(shash, base_hash);
		shash->tfm = base_hash;
		shash->flags = crypto_shash_get_flags(base_hash);
		bs = crypto_shash_blocksize(base_hash);
		align = KEYCTX_ALIGN_PAD(max_authsize);
		o_ptr = actx->h_iopad + param.result_size + align;

		if (keys.authkeylen > bs) {
			err = crypto_shash_digest(shash, keys.authkey,
						  keys.authkeylen,
						  o_ptr);
			if (err) {
				pr_err("chcr : Base driver cannot be loaded\n");
				goto out;
			}
			keys.authkeylen = max_authsize;
		} else
			memcpy(o_ptr, keys.authkey, keys.authkeylen);

		/* Compute the ipad-digest*/
		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
		memcpy(pad, o_ptr, keys.authkeylen);
		for (i = 0; i < bs >> 2; i++)
			*((unsigned int *)pad + i) ^= IPAD_DATA;

		if (chcr_compute_partial_hash(shash, pad, actx->h_iopad,
					      max_authsize))
			goto out;
		/* Compute the opad-digest */
		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
		memcpy(pad, o_ptr, keys.authkeylen);
		for (i = 0; i < bs >> 2; i++)
			*((unsigned int *)pad + i) ^= OPAD_DATA;

		if (chcr_compute_partial_hash(shash, pad, o_ptr, max_authsize))
			goto out;

		/* convert the ipad and opad digest to network order */
		chcr_change_order(actx->h_iopad, param.result_size);
		chcr_change_order(o_ptr, param.result_size);
		key_ctx_len = sizeof(struct _key_ctx) +
			((DIV_ROUND_UP(keys.enckeylen, 16)) << 4) +
			(param.result_size + align) * 2;
		aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, param.mk_size,
							0, 1, key_ctx_len >> 4);
		actx->auth_mode = param.auth_mode;
		chcr_free_shash(base_hash);

		return 0;
	}
out:
	aeadctx->enckey_len = 0;
	if (!IS_ERR(base_hash))
		chcr_free_shash(base_hash);
	return -EINVAL;
}
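/*
 * What the block above builds is the usual HMAC precomputation: the auth
 * key is zero-padded to one hash block, XORed with the 0x36/0x5c pad
 * patterns, and a single compression-function pass over each padded block
 * yields the partial ipad/opad digests. Only these intermediate states
 * (byte-swapped to the order the hardware expects) are kept in h_iopad;
 * the raw auth key is never sent to the adapter.
 */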
static int chcr_aead_digest_null_setkey(struct crypto_aead *authenc,
					const u8 *key, unsigned int keylen)
{
	struct chcr_context *ctx = crypto_aead_ctx(authenc);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	struct crypto_authenc_keys keys;
	int err;
	/* it contains auth and cipher key both*/
	int key_ctx_len = 0;
	unsigned char ck_size = 0;

	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(authenc)
			      & CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	crypto_aead_clear_flags(authenc, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(authenc, crypto_aead_get_flags(aeadctx->sw_cipher)
			      & CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
		crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
		goto out;
	}
	if (keys.enckeylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keys.enckeylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keys.enckeylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		pr_err("chcr : Unsupported cipher key\n");
		goto out;
	}
	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
	aeadctx->enckey_len = keys.enckeylen;
	get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
			    aeadctx->enckey_len << 3);
	key_ctx_len = sizeof(struct _key_ctx)
		+ ((DIV_ROUND_UP(keys.enckeylen, 16)) << 4);

	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY, 0,
						0, key_ctx_len >> 4);
	actx->auth_mode = CHCR_SCMD_AUTH_MODE_NOP;
	return 0;
out:
	aeadctx->enckey_len = 0;
	return -EINVAL;
}
static int chcr_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);

	reqctx->verify = VERIFY_HW;

	switch (get_aead_subtype(tfm)) {
	case CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC:
	case CRYPTO_ALG_SUB_TYPE_AEAD_NULL:
		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
				    create_authenc_wr);
	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
				    create_aead_ccm_wr);
	default:
		return chcr_aead_op(req, CHCR_ENCRYPT_OP, 0,
				    create_gcm_wr);
	}
}
static int chcr_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(crypto_aead_ctx(tfm));
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	int size;

	if (aeadctx->mayverify == VERIFY_SW) {
		size = crypto_aead_maxauthsize(tfm);
		reqctx->verify = VERIFY_SW;
	} else {
		size = 0;
		reqctx->verify = VERIFY_HW;
	}

	switch (get_aead_subtype(tfm)) {
	case CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC:
	case CRYPTO_ALG_SUB_TYPE_AEAD_NULL:
		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
				    create_authenc_wr);
	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
				    create_aead_ccm_wr);
	default:
		return chcr_aead_op(req, CHCR_DECRYPT_OP, size,
				    create_gcm_wr);
	}
}
static int chcr_aead_op(struct aead_request *req,
			unsigned short op_type,
			int size,
			create_wr_t create_wr_fn)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = crypto_aead_ctx(tfm);
	struct uld_ctx *u_ctx;
	struct sk_buff *skb;

	if (!ctx->dev) {
		pr_err("chcr : %s : No crypto device.\n", __func__);
		return -ENXIO;
	}
	u_ctx = ULD_CTX(ctx);
	if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
				   ctx->tx_qidx)) {
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -EBUSY;
	}

	/* Form a WR from req */
	skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[ctx->rx_qidx], size,
			   op_type);

	if (IS_ERR(skb) || !skb)
		return PTR_ERR(skb);

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}
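/*
 * Returning -EINPROGRESS hands the request over to the crypto API's
 * asynchronous completion path: the caller's callback fires later from
 * the response handler once the adapter returns the CPL for this WR.
 * -EBUSY is only returned when the queue is full and the request may not
 * be backlogged.
 */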
static struct chcr_alg_template driver_algs[] = {
	/* AES-CBC */
	{
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_SUB_TYPE_CBC,
		.is_registered = 0,
		.alg.crypto = {
			.cra_name		= "cbc(aes)",
			.cra_driver_name	= "cbc-aes-chcr",
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_init		= chcr_cra_init,
			.cra_exit		= chcr_cra_exit,
			.cra_u.ablkcipher	= {
				.min_keysize	= AES_MIN_KEY_SIZE,
				.max_keysize	= AES_MAX_KEY_SIZE,
				.ivsize		= AES_BLOCK_SIZE,
				.setkey		= chcr_aes_cbc_setkey,
				.encrypt	= chcr_aes_encrypt,
				.decrypt	= chcr_aes_decrypt,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_SUB_TYPE_XTS,
		.is_registered = 0,
		.alg.crypto = {
			.cra_name		= "xts(aes)",
			.cra_driver_name	= "xts-aes-chcr",
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_init		= chcr_cra_init,
			.cra_exit		= NULL,
			.cra_u.ablkcipher	= {
				.min_keysize	= 2 * AES_MIN_KEY_SIZE,
				.max_keysize	= 2 * AES_MAX_KEY_SIZE,
				.ivsize		= AES_BLOCK_SIZE,
				.setkey		= chcr_aes_xts_setkey,
				.encrypt	= chcr_aes_encrypt,
				.decrypt	= chcr_aes_decrypt,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_SUB_TYPE_CTR,
		.is_registered = 0,
		.alg.crypto = {
			.cra_name		= "ctr(aes)",
			.cra_driver_name	= "ctr-aes-chcr",
			.cra_blocksize		= 1,
			.cra_init		= chcr_cra_init,
			.cra_exit		= chcr_cra_exit,
			.cra_u.ablkcipher	= {
				.min_keysize	= AES_MIN_KEY_SIZE,
				.max_keysize	= AES_MAX_KEY_SIZE,
				.ivsize		= AES_BLOCK_SIZE,
				.setkey		= chcr_aes_ctr_setkey,
				.encrypt	= chcr_aes_encrypt,
				.decrypt	= chcr_aes_decrypt,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER |
			CRYPTO_ALG_SUB_TYPE_CTR_RFC3686,
		.is_registered = 0,
		.alg.crypto = {
			.cra_name		= "rfc3686(ctr(aes))",
			.cra_driver_name	= "rfc3686-ctr-aes-chcr",
			.cra_blocksize		= 1,
			.cra_init		= chcr_rfc3686_init,
			.cra_exit		= chcr_cra_exit,
			.cra_u.ablkcipher	= {
				.min_keysize	= AES_MIN_KEY_SIZE +
					CTR_RFC3686_NONCE_SIZE,
				.max_keysize	= AES_MAX_KEY_SIZE +
					CTR_RFC3686_NONCE_SIZE,
				.ivsize		= CTR_RFC3686_IV_SIZE,
				.setkey		= chcr_aes_rfc3686_setkey,
				.encrypt	= chcr_aes_encrypt,
				.decrypt	= chcr_aes_decrypt,
				.geniv		= "seqiv",
			}
		}
	},
	/* SHA */
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			}
		}
	},
	/* HMAC */
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "hmac-sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "hmac-sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "hmac-sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "hmac-sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			}
		}
	},
	/* Add AEAD Algorithms */
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_GCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-chcr",
				.cra_blocksize	= 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = 12,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_gcm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-chcr",
				.cra_blocksize	= 1,
				.cra_priority = CHCR_AEAD_PRIORITY + 1,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = 8,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_CCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "ccm(aes)",
				.cra_driver_name = "ccm-aes-chcr",
				.cra_blocksize	= 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_ccm_setkey,
			.setauthsize = chcr_ccm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4309(ccm(aes))",
				.cra_driver_name = "rfc4309-ccm-aes-chcr",
				.cra_blocksize	= 1,
				.cra_priority = CHCR_AEAD_PRIORITY + 1,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx),
			},
			.ivsize = 8,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_rfc4309_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha1-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha256-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha224-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha384-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_AUTHENC,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha512-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_NULL,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(digest_null,cbc(aes))",
				.cra_driver_name =
					"authenc-digest_null-cbc-aes-chcr",
				.cra_blocksize	= AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize =	sizeof(struct chcr_context) +
						sizeof(struct chcr_aead_ctx) +
						sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = 0,
			.setkey = chcr_aead_digest_null_setkey,
			.setauthsize = chcr_authenc_null_setauthsize,
		}
	},
};
/*
 *	chcr_unregister_alg - Deregister crypto algorithms with
 *	kernel framework.
 */
static int chcr_unregister_alg(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			if (driver_algs[i].is_registered)
				crypto_unregister_alg(
						&driver_algs[i].alg.crypto);
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			if (driver_algs[i].is_registered)
				crypto_unregister_aead(
						&driver_algs[i].alg.aead);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			if (driver_algs[i].is_registered)
				crypto_unregister_ahash(
						&driver_algs[i].alg.hash);
			break;
		}
		driver_algs[i].is_registered = 0;
	}
	return 0;
}
#define SZ_AHASH_CTX sizeof(struct chcr_context)
#define SZ_AHASH_H_CTX (sizeof(struct chcr_context) + sizeof(struct hmac_ctx))
#define SZ_AHASH_REQ_CTX sizeof(struct chcr_ahash_req_ctx)
#define AHASH_CRA_FLAGS (CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC)
/*
 *	chcr_register_alg - Register crypto algorithms with kernel framework.
 */
static int chcr_register_alg(void)
{
	struct crypto_alg ai;
	struct ahash_alg *a_hash;
	int err = 0, i;
	char *name = NULL;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		if (driver_algs[i].is_registered)
			continue;
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			driver_algs[i].alg.crypto.cra_priority =
				CHCR_CRA_PRIORITY;
			driver_algs[i].alg.crypto.cra_module = THIS_MODULE;
			driver_algs[i].alg.crypto.cra_flags =
				CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK;
			driver_algs[i].alg.crypto.cra_ctxsize =
				sizeof(struct chcr_context) +
				sizeof(struct ablk_ctx);
			driver_algs[i].alg.crypto.cra_alignmask = 0;
			driver_algs[i].alg.crypto.cra_type =
				&crypto_ablkcipher_type;
			err = crypto_register_alg(&driver_algs[i].alg.crypto);
			name = driver_algs[i].alg.crypto.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			driver_algs[i].alg.aead.base.cra_flags =
				CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK;
			driver_algs[i].alg.aead.encrypt = chcr_aead_encrypt;
			driver_algs[i].alg.aead.decrypt = chcr_aead_decrypt;
			driver_algs[i].alg.aead.init = chcr_aead_cra_init;
			driver_algs[i].alg.aead.exit = chcr_aead_cra_exit;
			driver_algs[i].alg.aead.base.cra_module = THIS_MODULE;
			err = crypto_register_aead(&driver_algs[i].alg.aead);
			name = driver_algs[i].alg.aead.base.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			a_hash = &driver_algs[i].alg.hash;
			a_hash->update = chcr_ahash_update;
			a_hash->final = chcr_ahash_final;
			a_hash->finup = chcr_ahash_finup;
			a_hash->digest = chcr_ahash_digest;
			a_hash->export = chcr_ahash_export;
			a_hash->import = chcr_ahash_import;
			a_hash->halg.statesize = SZ_AHASH_REQ_CTX;
			a_hash->halg.base.cra_priority = CHCR_CRA_PRIORITY;
			a_hash->halg.base.cra_module = THIS_MODULE;
			a_hash->halg.base.cra_flags = AHASH_CRA_FLAGS;
			a_hash->halg.base.cra_alignmask = 0;
			a_hash->halg.base.cra_exit = NULL;
			a_hash->halg.base.cra_type = &crypto_ahash_type;

			if (driver_algs[i].type == CRYPTO_ALG_TYPE_HMAC) {
				a_hash->halg.base.cra_init = chcr_hmac_cra_init;
				a_hash->halg.base.cra_exit = chcr_hmac_cra_exit;
				a_hash->init = chcr_hmac_init;
				a_hash->setkey = chcr_ahash_setkey;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_H_CTX;
			} else {
				a_hash->init = chcr_sha_init;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_CTX;
				a_hash->halg.base.cra_init = chcr_sha_cra_init;
			}
			err = crypto_register_ahash(&driver_algs[i].alg.hash);
			ai = driver_algs[i].alg.hash.halg.base;
			name = ai.cra_driver_name;
			break;
		}
		if (err) {
			pr_err("chcr : %s : Algorithm registration failed\n",
			       name);
			goto register_err;
		}
		driver_algs[i].is_registered = 1;
	}
	return 0;

register_err:
	chcr_unregister_alg();
	return err;
}
/*
 *	start_crypto - Register the crypto algorithms.
 *	This should be called once when the first device comes up. After this,
 *	the kernel will start calling driver APIs for crypto operations.
 */
int start_crypto(void)
{
	return chcr_register_alg();
}

/*
 *	stop_crypto - Deregister all the crypto algorithms with the kernel.
 *	This should be called once when the last device goes down. After this,
 *	the kernel will not call the driver APIs for crypto operations.
 */
int stop_crypto(void)
{
	chcr_unregister_alg();
	return 0;
}