drivers/crypto/inside-secure/safexcel_hash.c (GNU Linux-libre 5.19-rc6-gnu)
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <crypto/internal/cipher.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
        struct safexcel_context base;

        u32 alg;
        u8  key_sz;
        bool cbcmac;
        bool do_fallback;
        bool fb_init_done;
        bool fb_do_setkey;

        struct crypto_cipher *kaes;
        struct crypto_ahash *fback;
        struct crypto_shash *shpre;
        struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
        bool last_req;
        bool finish;
        bool hmac;
        bool needs_inv;
        bool hmac_zlen;
        bool len_is_le;
        bool not_first;
        bool xcbcmac;

        int nents;
        dma_addr_t result_dma;

        u32 digest;

        u8 state_sz;    /* expected state size, only set once */
        u8 block_sz;    /* block size, only set once */
        u8 digest_sz;   /* output digest size, only set once */
        __le32 state[SHA3_512_BLOCK_SIZE /
                     sizeof(__le32)] __aligned(sizeof(__le32));

        u64 len;
        u64 processed;

        u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
        dma_addr_t cache_dma;
        unsigned int cache_sz;

        u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
        return req->len - req->processed;
}

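/*
 * Bookkeeping note (illustrative, not from the original source): req->len
 * counts every byte accepted by update() while req->processed counts bytes
 * already handed to the engine, so safexcel_queued_len() is the backlog
 * still sitting in req->cache. E.g. after update(100) on a 64-byte-block
 * hash, 64 bytes can be sent and safexcel_queued_len() returns 36.
 */
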
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
                                u32 input_length, u32 result_length,
                                bool cbcmac)
{
        struct safexcel_token *token =
                (struct safexcel_token *)cdesc->control_data.token;

        token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[0].packet_length = input_length;
        token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

        input_length &= 15;
        if (unlikely(cbcmac && input_length)) {
                token[0].stat = 0;
                token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
                token[1].packet_length = 16 - input_length;
                token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
                token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
        } else {
                token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
                eip197_noop_token(&token[1]);
        }

        token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
        token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
                        EIP197_TOKEN_STAT_LAST_PACKET;
        token[2].packet_length = result_length;
        token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

        eip197_noop_token(&token[3]);
}

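/*
 * Sketch of the token program built by safexcel_hash_token() (an
 * illustration, assuming a 20-byte CBC-MAC input):
 *
 *   token[0]: DIRECTION, packet_length = 20, hash the payload
 *   token[1]: INSERT, 16 - (20 & 15) = 12 zero bytes of block padding,
 *             LAST_HASH
 *   token[2]: INSERT, result_length digest bytes into the output stream,
 *             LAST_HASH | LAST_PACKET
 *   token[3]: no-op filler
 */
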
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
                                     struct safexcel_ahash_req *req,
                                     struct safexcel_command_desc *cdesc)
{
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        u64 count = 0;

        cdesc->control_data.control0 = ctx->alg;
        cdesc->control_data.control1 = 0;

        /*
         * Copy the input digest if needed, and setup the context
         * fields. Do this now as we need it to setup the first command
         * descriptor.
         */
        if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
                if (req->xcbcmac)
                        memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
                else
                        memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

                if (!req->finish && req->xcbcmac)
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_DIGEST_XCM |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_NO_FINISH_HASH |
                                CONTEXT_CONTROL_SIZE(req->state_sz /
                                                     sizeof(u32));
                else
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_DIGEST_XCM |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_SIZE(req->state_sz /
                                                     sizeof(u32));
                return;
        } else if (!req->processed) {
                /* First - and possibly only - block of basic hash only */
                if (req->finish)
                        cdesc->control_data.control0 |= req->digest |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_RESTART_HASH  |
                                /* ensure it's not 0! */
                                CONTEXT_CONTROL_SIZE(1);
                else
                        cdesc->control_data.control0 |= req->digest |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_RESTART_HASH   |
                                CONTEXT_CONTROL_NO_FINISH_HASH |
                                /* ensure it's not 0! */
                                CONTEXT_CONTROL_SIZE(1);
                return;
        }

        /* Hash continuation or HMAC, setup (inner) digest from state */
        memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

        if (req->finish) {
                /* Compute digest count for hash/HMAC finish operations */
                if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
                    req->hmac_zlen || (req->processed != req->block_sz)) {
                        count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

                        /* This is a hardware limitation, as the
                         * counter must fit into a u32. This represents
                         * a fairly big amount of input data, so we
                         * shouldn't see this.
                         */
                        if (unlikely(count & 0xffffffff00000000ULL)) {
                                dev_warn(priv->dev,
                                         "Input data is too big\n");
                                return;
                        }
                }

                if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
                    /* Special case: zero length HMAC */
                    req->hmac_zlen ||
                    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
                    (req->processed != req->block_sz)) {
                        /* Basic hash continue operation, need digest + cnt */
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
                        /* For zero-len HMAC, don't finalize, already padded! */
                        if (req->hmac_zlen)
                                cdesc->control_data.control0 |=
                                        CONTEXT_CONTROL_NO_FINISH_HASH;
                        cdesc->control_data.control1 |=
                                CONTEXT_CONTROL_DIGEST_CNT;
                        ctx->base.ctxr->data[req->state_sz >> 2] =
                                cpu_to_le32(count);
                        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

                        /* Clear zero-length HMAC flag for next operation! */
                        req->hmac_zlen = false;
                } else { /* HMAC */
                        /* Need outer digest for HMAC finalization */
                        memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
                               &ctx->base.opad, req->state_sz);

                        /* Single pass HMAC - no digest count */
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_DIGEST_HMAC;
                }
        } else { /* Hash continuation, do not finish yet */
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
                        CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
                        CONTEXT_CONTROL_TYPE_HASH_OUT |
                        CONTEXT_CONTROL_NO_FINISH_HASH;
        }
}

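/*
 * Worked example of the context sizing above (illustrative, assuming
 * SHA-256 with a 32-byte state): CONTEXT_CONTROL_SIZE() counts 32-bit
 * context words, so a plain continuation loads state_sz >> 2 = 8 words, a
 * "fake HMAC" finish adds one extra word for the 64-byte-block counter
 * (8 + 1 = 9), and a true single-pass HMAC loads inner plus outer digests,
 * i.e. state_sz >> 1 = 16 words.
 */
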
static int safexcel_ahash_enqueue(struct ahash_request *areq);

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        u64 cache_len;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: result: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (sreq->nents) {
                dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
                sreq->nents = 0;
        }

        if (sreq->result_dma) {
                dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
                                 DMA_FROM_DEVICE);
                sreq->result_dma = 0;
        }

        if (sreq->cache_dma) {
                dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
                                 DMA_TO_DEVICE);
                sreq->cache_dma = 0;
                sreq->cache_sz = 0;
        }

        if (sreq->finish) {
                if (sreq->hmac &&
                    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
                        /* Faking HMAC using hash - need to do outer hash */
                        memcpy(sreq->cache, sreq->state,
                               crypto_ahash_digestsize(ahash));

                        memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);

                        sreq->len = sreq->block_sz +
                                    crypto_ahash_digestsize(ahash);
                        sreq->processed = sreq->block_sz;
                        sreq->hmac = 0;

                        if (priv->flags & EIP197_TRC_CACHE)
                                ctx->base.needs_inv = true;
                        areq->nbytes = 0;
                        safexcel_ahash_enqueue(areq);

                        *should_complete = false; /* Not done yet */
                        return 1;
                }

                if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                             ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
                        /* Undo final XOR with 0xffffffff ... */
                        *(__le32 *)areq->result = ~sreq->state[0];
                } else {
                        memcpy(areq->result, sreq->state,
                               crypto_ahash_digestsize(ahash));
                }
        }

        cache_len = safexcel_queued_len(sreq);
        if (cache_len)
                memcpy(sreq->cache, sreq->cache_next, cache_len);

        *should_complete = true;

        return 1;
}

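/*
 * Sketch of the "fake HMAC" round trip handled above (explanatory, not from
 * the original source). With K' the padded key, HMAC(K, m) =
 * H(K' ^ opad || H(K' ^ ipad || m)); on engines that cannot continue an
 * HMAC, the completed inner digest is copied into the cache as if it were
 * input data, the precomputed opad midstate is restored as the running
 * state, and the request is re-enqueued. Only the second completion
 * reports the final result to the caller.
 */
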
static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
                                   int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
        struct safexcel_result_desc *rdesc;
        struct scatterlist *sg;
        struct safexcel_token *dmmy;
        int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
        u64 queued, len;

        queued = safexcel_queued_len(req);
        if (queued <= HASH_CACHE_SIZE)
                cache_len = queued;
        else
                cache_len = queued - areq->nbytes;

        if (!req->finish && !req->last_req) {
                /* If this is not the last request and the queued data does not
                 * fit into full cache blocks, cache it for the next send call.
                 */
                extra = queued & (HASH_CACHE_SIZE - 1);

                /* If this is not the last request and the queued data
                 * is a multiple of a block, cache the last one for now.
                 */
                if (!extra)
                        extra = HASH_CACHE_SIZE;

                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache_next, extra,
                                   areq->nbytes - extra);

                queued -= extra;

                if (!queued) {
                        *commands = 0;
                        *results = 0;
                        return 0;
                }

                extra = 0;
        }

        if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
                if (unlikely(cache_len < AES_BLOCK_SIZE)) {
                        /*
                         * Cache contains less than one full block:
                         * pad it out to a complete block.
                         */
                        extra = AES_BLOCK_SIZE - cache_len;
                        if (queued > cache_len) {
                                /* More data follows: borrow bytes */
                                u64 tmp = queued - cache_len;

                                skip = min_t(u64, tmp, extra);
                                sg_pcopy_to_buffer(areq->src,
                                        sg_nents(areq->src),
                                        req->cache + cache_len,
                                        skip, 0);
                        }
                        extra -= skip;
                        memset(req->cache + cache_len + skip, 0, extra);
                        if (!ctx->cbcmac && extra) {
                                // 10* (0x80, then zeroes) padding for XCBC-MAC & CMAC
                                req->cache[cache_len + skip] = 0x80;
                                // HW will use K2 instead of K3 - compensate!
                                for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
                                        u32 *cache = (void *)req->cache;
                                        u32 *ipad = ctx->base.ipad.word;
                                        u32 x;

                                        x = ipad[i] ^ ipad[i + 4];
                                        cache[i] ^= swab(x);
                                }
                        }
                        cache_len = AES_BLOCK_SIZE;
                        queued = queued + extra;
                }

                /* XCBC continue: XOR previous result into 1st word */
                crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
        }

        len = queued;
        /* Add a command descriptor for the cached data, if any */
        if (cache_len) {
                req->cache_dma = dma_map_single(priv->dev, req->cache,
                                                cache_len, DMA_TO_DEVICE);
                if (dma_mapping_error(priv->dev, req->cache_dma))
                        return -EINVAL;

                req->cache_sz = cache_len;
                first_cdesc = safexcel_add_cdesc(priv, ring, 1,
                                                 (cache_len == len),
                                                 req->cache_dma, cache_len,
                                                 len, ctx->base.ctxr_dma,
                                                 &dmmy);
                if (IS_ERR(first_cdesc)) {
                        ret = PTR_ERR(first_cdesc);
                        goto unmap_cache;
                }
                n_cdesc++;

                queued -= cache_len;
                if (!queued)
                        goto send_command;
        }

        /* Now handle the current ahash request buffer(s) */
        req->nents = dma_map_sg(priv->dev, areq->src,
                                sg_nents_for_len(areq->src,
                                                 areq->nbytes),
                                DMA_TO_DEVICE);
        if (!req->nents) {
                ret = -ENOMEM;
                goto cdesc_rollback;
        }

        for_each_sg(areq->src, sg, req->nents, i) {
                int sglen = sg_dma_len(sg);

                if (unlikely(sglen <= skip)) {
                        skip -= sglen;
                        continue;
                }

                /* Do not overflow the request */
                if ((queued + skip) <= sglen)
                        sglen = queued;
                else
                        sglen -= skip;

                cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
                                           !(queued - sglen),
                                           sg_dma_address(sg) + skip, sglen,
                                           len, ctx->base.ctxr_dma, &dmmy);
                if (IS_ERR(cdesc)) {
                        ret = PTR_ERR(cdesc);
                        goto unmap_sg;
                }

                if (!n_cdesc)
                        first_cdesc = cdesc;
                n_cdesc++;

                queued -= sglen;
                if (!queued)
                        break;
                skip = 0;
        }

send_command:
        /* Setup the context options */
        safexcel_context_control(ctx, req, first_cdesc);

        /* Add the token */
        safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

        req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
                                         DMA_FROM_DEVICE);
        if (dma_mapping_error(priv->dev, req->result_dma)) {
                ret = -EINVAL;
                goto unmap_sg;
        }

        /* Add a result descriptor */
        rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
                                   req->digest_sz);
        if (IS_ERR(rdesc)) {
                ret = PTR_ERR(rdesc);
                goto unmap_result;
        }

        safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

        req->processed += len - extra;

        *commands = n_cdesc;
        *results = 1;
        return 0;

unmap_result:
        dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
                         DMA_FROM_DEVICE);
unmap_sg:
        if (req->nents) {
                dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
                req->nents = 0;
        }
cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
        if (req->cache_dma) {
                dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
                                 DMA_TO_DEVICE);
                req->cache_dma = 0;
                req->cache_sz = 0;
        }

        return ret;
}

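/*
 * Descriptor layout produced by safexcel_ahash_send_req() (illustrative
 * summary): the command ring receives at most one descriptor for the cached
 * partial block plus one per mapped scatterlist entry, all sharing the same
 * context record and total length, while the result ring receives exactly
 * one descriptor pointing at req->state, which is why *results is always 1
 * on success.
 */
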
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        int enq_ret;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: invalidate: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (ctx->base.exit_inv) {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;
                return 1;
        }

        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        *should_complete = false;

        return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
                                  struct crypto_async_request *async,
                                  bool *should_complete, int *ret)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int err;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

        if (req->needs_inv) {
                req->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async,
                                                 should_complete, ret);
        }

        return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
                                   int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        int ret;

        ret = safexcel_invalidate_cache(async, ctx->base.priv,
                                        ctx->base.ctxr_dma, ring);
        if (unlikely(ret))
                return ret;

        *commands = 1;
        *results = 1;

        return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
                               int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        if (req->needs_inv)
                ret = safexcel_ahash_send_inv(async, ring, commands, results);
        else
                ret = safexcel_ahash_send_req(async, ring, commands, results);

        return ret;
}

static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
        struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
        struct safexcel_inv_result result = {};
        int ring = ctx->base.ring;

        memset(req, 0, EIP197_AHASH_REQ_SIZE);

        /* create invalidation request */
        init_completion(&result.completion);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_inv_complete, &result);

        ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
        ctx = crypto_tfm_ctx(req->base.tfm);
        ctx->base.exit_inv = true;
        rctx->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        wait_for_completion(&result.completion);

        if (result.error) {
                dev_warn(priv->dev, "hash: completion error (%d)\n",
                         result.error);
                return result.error;
        }

        return 0;
}

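/*
 * Background note (hedged): engines with a transform record cache (the
 * EIP197_TRC_CACHE flag) may hold an on-chip copy of the context record, so
 * before the DMA memory behind ctx->base.ctxr can be freed or reused an
 * "invalidate" request must flush that cached record. The helper above
 * builds such a request on the stack and blocks until the engine confirms
 * completion.
 */
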
/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least one full block size in the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        u64 cache_len;

        /* cache_len: everything accepted by the driver but not sent yet,
         * i.e. the total size handled by update() minus the size of the
         * last request, minus the total size already handled by send()
         */
        cache_len = safexcel_queued_len(req);

        /*
         * In case there aren't enough bytes to proceed (less than a
         * block size), cache the data until we have enough.
         */
        if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache + cache_len,
                                   areq->nbytes, 0);
                return 0;
        }

        /* We couldn't cache all the data */
        return -E2BIG;
}

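/*
 * Example of the caching decision (illustrative; 64 is assumed here for
 * HASH_CACHE_SIZE just to make the numbers concrete): with 16 bytes already
 * queued, update(32) satisfies 16 + 32 <= 64, so the data is only copied
 * into req->cache and nothing is sent. A following update(40) fails the
 * test (48 + 40 > 64), safexcel_ahash_cache() returns -E2BIG and the
 * caller enqueues the request towards the engine.
 */
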
static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret, ring;

        req->needs_inv = false;

        if (ctx->base.ctxr) {
                if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
                    /* invalidate for *any* non-XCBC continuation */
                    ((req->not_first && !req->xcbcmac) ||
                     /* invalidate if (i)digest changed */
                     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
                     /* invalidate for HMAC finish with odigest changed */
                     (req->finish && req->hmac &&
                      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
                             &ctx->base.opad, req->state_sz))))
                        /*
                         * We're still setting needs_inv here, even though it is
                         * cleared right away, because the needs_inv flag can be
                         * set in other functions and we want to keep the same
                         * logic.
                         */
                        ctx->base.needs_inv = true;

                if (ctx->base.needs_inv) {
                        ctx->base.needs_inv = false;
                        req->needs_inv = true;
                }
        } else {
                ctx->base.ring = safexcel_select_ring(priv);
                ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                                 EIP197_GFP_FLAGS(areq->base),
                                                 &ctx->base.ctxr_dma);
                if (!ctx->base.ctxr)
                        return -ENOMEM;
        }
        req->not_first = true;

        ring = ctx->base.ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        /* If the request is 0 length, do nothing */
        if (!areq->nbytes)
                return 0;

        /* Add request to the cache if it fits */
        ret = safexcel_ahash_cache(areq);

        /* Update total request length */
        req->len += areq->nbytes;

        /* If not all data could fit into the cache, go process the excess.
         * Also go process immediately for an HMAC IV precompute, which
         * will never be finished at all, but needs to be processed anyway.
         */
        if ((ret && !req->finish) || req->last_req)
                return safexcel_ahash_enqueue(areq);

        return 0;
}

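/*
 * For reference, these handlers implement the standard kernel ahash call
 * sequence (user-side sketch; done_cb, cb_data, sgl, digest and nbytes are
 * caller-provided placeholders):
 *
 *   struct crypto_ahash *tfm = crypto_alloc_ahash("safexcel-sha1", 0, 0);
 *   struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *   ahash_request_set_callback(req, 0, done_cb, cb_data);
 *   ahash_request_set_crypt(req, sgl, digest, nbytes);
 *   crypto_ahash_init(req);    // -> safexcel_sha1_init()
 *   crypto_ahash_update(req);  // -> safexcel_ahash_update(), repeatable
 *   crypto_ahash_final(req);   // -> safexcel_ahash_final()
 */
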
static int safexcel_ahash_final(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

        req->finish = true;

        if (unlikely(!req->len && !areq->nbytes)) {
                /*
                 * If we have an overall 0 length *hash* request:
                 * The HW cannot do 0 length hash, so we provide the correct
                 * result directly here.
                 */
                if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
                        memcpy(areq->result, md5_zero_message_hash,
                               MD5_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
                        memcpy(areq->result, sha1_zero_message_hash,
                               SHA1_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
                        memcpy(areq->result, sha224_zero_message_hash,
                               SHA224_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
                        memcpy(areq->result, sha256_zero_message_hash,
                               SHA256_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
                        memcpy(areq->result, sha384_zero_message_hash,
                               SHA384_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
                        memcpy(areq->result, sha512_zero_message_hash,
                               SHA512_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
                        memcpy(areq->result,
                               EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
                }

                return 0;
        } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                            ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
                            req->len == sizeof(u32) && !areq->nbytes)) {
                /* Zero length CRC32 */
                memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
                return 0;
        } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                /* Zero length CBC MAC */
                memset(areq->result, 0, AES_BLOCK_SIZE);
                return 0;
        } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                /* Zero length (X)CBC/CMAC */
                int i;

                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
                        u32 *result = (void *)areq->result;

                        /* K3 */
                        result[i] = swab(ctx->base.ipad.word[i + 4]);
                }
                areq->result[0] ^= 0x80;        // 10* padding: append a 1 bit
                crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
                return 0;
        } else if (unlikely(req->hmac &&
                            (req->len == req->block_sz) &&
                            !areq->nbytes)) {
                /*
                 * If we have an overall 0 length *HMAC* request:
                 * For HMAC, we need to finalize the inner digest
                 * and then perform the outer hash.
                 */

                /* generate pad block in the cache */
                /* start with a hash block of all zeroes */
                memset(req->cache, 0, req->block_sz);
                /* set the first byte to 0x80 to 'append a 1 bit' */
                req->cache[0] = 0x80;
                /* add the length in bits in the last 2 bytes */
                if (req->len_is_le) {
                        /* Little endian length word (e.g. MD5) */
                        req->cache[req->block_sz-8] = (req->block_sz << 3) &
                                                      255;
                        req->cache[req->block_sz-7] = (req->block_sz >> 5);
                } else {
                        /* Big endian length word (e.g. any SHA) */
                        req->cache[req->block_sz-2] = (req->block_sz >> 5);
                        req->cache[req->block_sz-1] = (req->block_sz << 3) &
                                                      255;
                }

                req->len += req->block_sz; /* plus 1 hash block */

                /* Set special zero-length HMAC flag */
                req->hmac_zlen = true;

                /* Finalize HMAC */
                req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        } else if (req->hmac) {
                /* Finalize HMAC */
                req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        }

        return safexcel_ahash_enqueue(areq);
}

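/*
 * Worked example of the zero-length HMAC pad block built above (derived
 * from the code; SHA-1 case, block_sz = 64): the cache becomes the
 * Merkle-Damgard padding for the already-hashed 64-byte key^ipad block:
 *
 *   cache[0]      = 0x80        (the appended '1' bit)
 *   cache[1..61]  = 0x00
 *   cache[62..63] = 0x02 0x00   (512 = 0x0200 bits, big endian)
 *
 * For MD5 the same 512-bit length lands little endian at cache[56..57].
 */
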
static int safexcel_ahash_finup(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        req->finish = true;

        safexcel_ahash_update(areq);
        return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_export_state *export = out;

        export->len = req->len;
        export->processed = req->processed;

        export->digest = req->digest;

        memcpy(export->state, req->state, req->state_sz);
        memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

        return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        const struct safexcel_ahash_export_state *export = in;
        int ret;

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req->len = export->len;
        req->processed = export->processed;

        req->digest = export->digest;

        memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
        memcpy(req->state, export->state, req->state_sz);

        return 0;
}

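/*
 * Usage note (standard crypto API behaviour, sketched): export()/import()
 * let a caller checkpoint a partially hashed stream and resume it later,
 * possibly on a different request:
 *
 *   u8 state[sizeof(struct safexcel_ahash_export_state)];
 *   crypto_ahash_export(req, state);    // snapshot len/processed/cache
 *   crypto_ahash_import(req2, state);   // re-init, then restore snapshot
 *
 * which is why .statesize in the templates below is
 * sizeof(struct safexcel_ahash_export_state).
 */
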
static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(__crypto_ahash_alg(tfm->__crt_alg),
                             struct safexcel_alg_template, alg.ahash);

        ctx->base.priv = tmpl->priv;
        ctx->base.send = safexcel_ahash_send;
        ctx->base.handle_result = safexcel_handle_result;
        ctx->fb_do_setkey = false;

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct safexcel_ahash_req));
        return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;

        return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_ahash_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}

struct safexcel_alg_template safexcel_alg_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA1,
        .alg.ahash = {
                .init = safexcel_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha1_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha1",
                                .cra_driver_name = "safexcel-sha1",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

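/*
 * Registration note (an assumption based on the driver's overall structure,
 * not on code shown in this file): templates such as safexcel_alg_sha1 are
 * picked up by the probe code in safexcel.c and registered via
 * crypto_register_ahash(), so a request for "sha1" resolves to
 * "safexcel-sha1" whenever SAFEXCEL_CRA_PRIORITY wins over the other
 * implementations available on the system.
 */
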
static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Start from ipad precompute */
        memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
        /* Already processed the key^ipad part now! */
        req->len        = SHA1_BLOCK_SIZE;
        req->processed  = SHA1_BLOCK_SIZE;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
        struct completion completion;
        int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
        struct safexcel_ahash_result *result = req->data;

        if (error == -EINPROGRESS)
                return;

        result->error = error;
        complete(&result->completion);
}

static int safexcel_hmac_init_pad(struct ahash_request *areq,
                                  unsigned int blocksize, const u8 *key,
                                  unsigned int keylen, u8 *ipad, u8 *opad)
{
        struct safexcel_ahash_result result;
        struct scatterlist sg;
        int ret, i;
        u8 *keydup;

        if (keylen <= blocksize) {
                memcpy(ipad, key, keylen);
        } else {
                keydup = kmemdup(key, keylen, GFP_KERNEL);
                if (!keydup)
                        return -ENOMEM;

                ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                           safexcel_ahash_complete, &result);
                sg_init_one(&sg, keydup, keylen);
                ahash_request_set_crypt(areq, &sg, ipad, keylen);
                init_completion(&result.completion);

                ret = crypto_ahash_digest(areq);
                if (ret == -EINPROGRESS || ret == -EBUSY) {
                        wait_for_completion_interruptible(&result.completion);
                        ret = result.error;
                }

                /* Avoid leaking */
                kfree_sensitive(keydup);

                if (ret)
                        return ret;

                keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
        }

        memset(ipad + keylen, 0, blocksize - keylen);
        memcpy(opad, ipad, blocksize);

        for (i = 0; i < blocksize; i++) {
                ipad[i] ^= HMAC_IPAD_VALUE;
                opad[i] ^= HMAC_OPAD_VALUE;
        }

        return 0;
}

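/*
 * The code above is the standard RFC 2104 key schedule: a key longer than
 * one block is first hashed down to digest size, then zero padded to the
 * block size, and finally expanded into the two pads
 *
 *   ipad[i] = K'[i] ^ 0x36   (HMAC_IPAD_VALUE)
 *   opad[i] = K'[i] ^ 0x5c   (HMAC_OPAD_VALUE)
 *
 * so that HMAC(K, m) = H(opad || H(ipad || m)).
 */
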
static int safexcel_hmac_init_iv(struct ahash_request *areq,
                                 unsigned int blocksize, u8 *pad, void *state)
{
        struct safexcel_ahash_result result;
        struct safexcel_ahash_req *req;
        struct scatterlist sg;
        int ret;

        ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_ahash_complete, &result);
        sg_init_one(&sg, pad, blocksize);
        ahash_request_set_crypt(areq, &sg, pad, blocksize);
        init_completion(&result.completion);

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req = ahash_request_ctx(areq);
        req->hmac = true;
        req->last_req = true;

        ret = crypto_ahash_update(areq);
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                return ret;

        wait_for_completion_interruptible(&result.completion);
        if (result.error)
                return result.error;

        return crypto_ahash_export(areq, state);
}

static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
                                  unsigned int keylen,
                                  void *istate, void *ostate)
{
        struct ahash_request *areq;
        struct crypto_ahash *tfm;
        unsigned int blocksize;
        u8 *ipad, *opad;
        int ret;

        tfm = crypto_alloc_ahash(alg, 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        areq = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!areq) {
                ret = -ENOMEM;
                goto free_ahash;
        }

        crypto_ahash_clear_flags(tfm, ~0);
        blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

        ipad = kcalloc(2, blocksize, GFP_KERNEL);
        if (!ipad) {
                ret = -ENOMEM;
                goto free_request;
        }

        opad = ipad + blocksize;

        ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
        kfree(ipad);
free_request:
        ahash_request_free(areq);
free_ahash:
        crypto_free_ahash(tfm);

        return ret;
}

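/*
 * Why midstates instead of raw pads (explanatory note, derived from the
 * code above): safexcel_hmac_init_iv() pushes exactly one block of key^ipad
 * (resp. key^opad) through the driver's own hash with last_req set and then
 * export()s the internal state. What safexcel_hmac_setkey() stores below is
 * therefore the compression state after that first block, which the engine
 * can load as a precomputed digest and continue from, saving one block of
 * hashing on every subsequent HMAC request.
 */
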
1193 int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
1194                          unsigned int keylen, const char *alg,
1195                          unsigned int state_sz)
1196 {
1197         struct safexcel_crypto_priv *priv = base->priv;
1198         struct safexcel_ahash_export_state istate, ostate;
1199         int ret;
1200
1201         ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
1202         if (ret)
1203                 return ret;
1204
1205         if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
1206             (memcmp(&base->ipad, istate.state, state_sz) ||
1207              memcmp(&base->opad, ostate.state, state_sz)))
1208                 base->needs_inv = true;
1209
1210         memcpy(&base->ipad, &istate.state, state_sz);
1211         memcpy(&base->opad, &ostate.state, state_sz);
1212
1213         return 0;
1214 }
1215
1216 static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
1217                                     unsigned int keylen, const char *alg,
1218                                     unsigned int state_sz)
1219 {
1220         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
1221
1222         return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
1223 }
1224
1225 static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
1226                                      unsigned int keylen)
1227 {
1228         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
1229                                         SHA1_DIGEST_SIZE);
1230 }
1231
1232 struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
1233         .type = SAFEXCEL_ALG_TYPE_AHASH,
1234         .algo_mask = SAFEXCEL_ALG_SHA1,
1235         .alg.ahash = {
1236                 .init = safexcel_hmac_sha1_init,
1237                 .update = safexcel_ahash_update,
1238                 .final = safexcel_ahash_final,
1239                 .finup = safexcel_ahash_finup,
1240                 .digest = safexcel_hmac_sha1_digest,
1241                 .setkey = safexcel_hmac_sha1_setkey,
1242                 .export = safexcel_ahash_export,
1243                 .import = safexcel_ahash_import,
1244                 .halg = {
1245                         .digestsize = SHA1_DIGEST_SIZE,
1246                         .statesize = sizeof(struct safexcel_ahash_export_state),
1247                         .base = {
1248                                 .cra_name = "hmac(sha1)",
1249                                 .cra_driver_name = "safexcel-hmac-sha1",
1250                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1251                                 .cra_flags = CRYPTO_ALG_ASYNC |
1252                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1253                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1254                                 .cra_blocksize = SHA1_BLOCK_SIZE,
1255                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1256                                 .cra_init = safexcel_ahash_cra_init,
1257                                 .cra_exit = safexcel_ahash_cra_exit,
1258                                 .cra_module = THIS_MODULE,
1259                         },
1260                 },
1261         },
1262 };
1263
1264 static int safexcel_sha256_init(struct ahash_request *areq)
1265 {
1266         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1267         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1268
1269         memset(req, 0, sizeof(*req));
1270
1271         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1272         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1273         req->state_sz = SHA256_DIGEST_SIZE;
1274         req->digest_sz = SHA256_DIGEST_SIZE;
1275         req->block_sz = SHA256_BLOCK_SIZE;
1276
1277         return 0;
1278 }
1279
1280 static int safexcel_sha256_digest(struct ahash_request *areq)
1281 {
1282         int ret = safexcel_sha256_init(areq);
1283
1284         if (ret)
1285                 return ret;
1286
1287         return safexcel_ahash_finup(areq);
1288 }
1289
1290 struct safexcel_alg_template safexcel_alg_sha256 = {
1291         .type = SAFEXCEL_ALG_TYPE_AHASH,
1292         .algo_mask = SAFEXCEL_ALG_SHA2_256,
1293         .alg.ahash = {
1294                 .init = safexcel_sha256_init,
1295                 .update = safexcel_ahash_update,
1296                 .final = safexcel_ahash_final,
1297                 .finup = safexcel_ahash_finup,
1298                 .digest = safexcel_sha256_digest,
1299                 .export = safexcel_ahash_export,
1300                 .import = safexcel_ahash_import,
1301                 .halg = {
1302                         .digestsize = SHA256_DIGEST_SIZE,
1303                         .statesize = sizeof(struct safexcel_ahash_export_state),
1304                         .base = {
1305                                 .cra_name = "sha256",
1306                                 .cra_driver_name = "safexcel-sha256",
1307                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1308                                 .cra_flags = CRYPTO_ALG_ASYNC |
1309                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1310                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1311                                 .cra_blocksize = SHA256_BLOCK_SIZE,
1312                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1313                                 .cra_init = safexcel_ahash_cra_init,
1314                                 .cra_exit = safexcel_ahash_cra_exit,
1315                                 .cra_module = THIS_MODULE,
1316                         },
1317                 },
1318         },
1319 };
1320
1321 static int safexcel_sha224_init(struct ahash_request *areq)
1322 {
1323         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1324         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1325
1326         memset(req, 0, sizeof(*req));
1327
1328         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1329         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1330         req->state_sz = SHA256_DIGEST_SIZE;
1331         req->digest_sz = SHA256_DIGEST_SIZE;
1332         req->block_sz = SHA256_BLOCK_SIZE;
1333
1334         return 0;
1335 }
1336
1337 static int safexcel_sha224_digest(struct ahash_request *areq)
1338 {
1339         int ret = safexcel_sha224_init(areq);
1340
1341         if (ret)
1342                 return ret;
1343
1344         return safexcel_ahash_finup(areq);
1345 }
1346
1347 struct safexcel_alg_template safexcel_alg_sha224 = {
1348         .type = SAFEXCEL_ALG_TYPE_AHASH,
1349         .algo_mask = SAFEXCEL_ALG_SHA2_256,
1350         .alg.ahash = {
1351                 .init = safexcel_sha224_init,
1352                 .update = safexcel_ahash_update,
1353                 .final = safexcel_ahash_final,
1354                 .finup = safexcel_ahash_finup,
1355                 .digest = safexcel_sha224_digest,
1356                 .export = safexcel_ahash_export,
1357                 .import = safexcel_ahash_import,
1358                 .halg = {
1359                         .digestsize = SHA224_DIGEST_SIZE,
1360                         .statesize = sizeof(struct safexcel_ahash_export_state),
1361                         .base = {
1362                                 .cra_name = "sha224",
1363                                 .cra_driver_name = "safexcel-sha224",
1364                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1365                                 .cra_flags = CRYPTO_ALG_ASYNC |
1366                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1367                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1368                                 .cra_blocksize = SHA224_BLOCK_SIZE,
1369                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1370                                 .cra_init = safexcel_ahash_cra_init,
1371                                 .cra_exit = safexcel_ahash_cra_exit,
1372                                 .cra_module = THIS_MODULE,
1373                         },
1374                 },
1375         },
1376 };
1377
1378 static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
1379                                        unsigned int keylen)
1380 {
1381         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
1382                                         SHA256_DIGEST_SIZE);
1383 }
1384
1385 static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1386 {
1387         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1388         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1389
1390         memset(req, 0, sizeof(*req));
1391
1392         /* Start from ipad precompute */
1393         memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1394         /* Already processed the key^ipad part now! */
1395         req->len        = SHA256_BLOCK_SIZE;
1396         req->processed  = SHA256_BLOCK_SIZE;
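             /*
              * len/processed start at one block so the engine resumes
              * mid-stream from the precomputed ipad state.
              */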
1397
1398         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1399         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1400         req->state_sz = SHA256_DIGEST_SIZE;
1401         req->digest_sz = SHA256_DIGEST_SIZE;
1402         req->block_sz = SHA256_BLOCK_SIZE;
1403         req->hmac = true;
1404
1405         return 0;
1406 }
1407
1408 static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
1409 {
1410         int ret = safexcel_hmac_sha224_init(areq);
1411
1412         if (ret)
1413                 return ret;
1414
1415         return safexcel_ahash_finup(areq);
1416 }
1417
1418 struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
1419         .type = SAFEXCEL_ALG_TYPE_AHASH,
1420         .algo_mask = SAFEXCEL_ALG_SHA2_256,
1421         .alg.ahash = {
1422                 .init = safexcel_hmac_sha224_init,
1423                 .update = safexcel_ahash_update,
1424                 .final = safexcel_ahash_final,
1425                 .finup = safexcel_ahash_finup,
1426                 .digest = safexcel_hmac_sha224_digest,
1427                 .setkey = safexcel_hmac_sha224_setkey,
1428                 .export = safexcel_ahash_export,
1429                 .import = safexcel_ahash_import,
1430                 .halg = {
1431                         .digestsize = SHA224_DIGEST_SIZE,
1432                         .statesize = sizeof(struct safexcel_ahash_export_state),
1433                         .base = {
1434                                 .cra_name = "hmac(sha224)",
1435                                 .cra_driver_name = "safexcel-hmac-sha224",
1436                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1437                                 .cra_flags = CRYPTO_ALG_ASYNC |
1438                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1439                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1440                                 .cra_blocksize = SHA224_BLOCK_SIZE,
1441                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1442                                 .cra_init = safexcel_ahash_cra_init,
1443                                 .cra_exit = safexcel_ahash_cra_exit,
1444                                 .cra_module = THIS_MODULE,
1445                         },
1446                 },
1447         },
1448 };
1449
1450 static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
1451                                        unsigned int keylen)
1452 {
1453         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
1454                                         SHA256_DIGEST_SIZE);
1455 }
1456
1457 static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1458 {
1459         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1460         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1461
1462         memset(req, 0, sizeof(*req));
1463
1464         /* Start from ipad precompute */
1465         memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1466         /* Already processed the key^ipad part now! */
1467         req->len        = SHA256_BLOCK_SIZE;
1468         req->processed  = SHA256_BLOCK_SIZE;
1469
1470         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1471         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1472         req->state_sz = SHA256_DIGEST_SIZE;
1473         req->digest_sz = SHA256_DIGEST_SIZE;
1474         req->block_sz = SHA256_BLOCK_SIZE;
1475         req->hmac = true;
1476
1477         return 0;
1478 }
1479
1480 static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1481 {
1482         int ret = safexcel_hmac_sha256_init(areq);
1483
1484         if (ret)
1485                 return ret;
1486
1487         return safexcel_ahash_finup(areq);
1488 }
1489
1490 struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1491         .type = SAFEXCEL_ALG_TYPE_AHASH,
1492         .algo_mask = SAFEXCEL_ALG_SHA2_256,
1493         .alg.ahash = {
1494                 .init = safexcel_hmac_sha256_init,
1495                 .update = safexcel_ahash_update,
1496                 .final = safexcel_ahash_final,
1497                 .finup = safexcel_ahash_finup,
1498                 .digest = safexcel_hmac_sha256_digest,
1499                 .setkey = safexcel_hmac_sha256_setkey,
1500                 .export = safexcel_ahash_export,
1501                 .import = safexcel_ahash_import,
1502                 .halg = {
1503                         .digestsize = SHA256_DIGEST_SIZE,
1504                         .statesize = sizeof(struct safexcel_ahash_export_state),
1505                         .base = {
1506                                 .cra_name = "hmac(sha256)",
1507                                 .cra_driver_name = "safexcel-hmac-sha256",
1508                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1509                                 .cra_flags = CRYPTO_ALG_ASYNC |
1510                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1511                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1512                                 .cra_blocksize = SHA256_BLOCK_SIZE,
1513                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1514                                 .cra_init = safexcel_ahash_cra_init,
1515                                 .cra_exit = safexcel_ahash_cra_exit,
1516                                 .cra_module = THIS_MODULE,
1517                         },
1518                 },
1519         },
1520 };
1521
1522 static int safexcel_sha512_init(struct ahash_request *areq)
1523 {
1524         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1525         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1526
1527         memset(req, 0, sizeof(*req));
1528
1529         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1530         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1531         req->state_sz = SHA512_DIGEST_SIZE;
1532         req->digest_sz = SHA512_DIGEST_SIZE;
1533         req->block_sz = SHA512_BLOCK_SIZE;
1534
1535         return 0;
1536 }
1537
1538 static int safexcel_sha512_digest(struct ahash_request *areq)
1539 {
1540         int ret = safexcel_sha512_init(areq);
1541
1542         if (ret)
1543                 return ret;
1544
1545         return safexcel_ahash_finup(areq);
1546 }
1547
1548 struct safexcel_alg_template safexcel_alg_sha512 = {
1549         .type = SAFEXCEL_ALG_TYPE_AHASH,
1550         .algo_mask = SAFEXCEL_ALG_SHA2_512,
1551         .alg.ahash = {
1552                 .init = safexcel_sha512_init,
1553                 .update = safexcel_ahash_update,
1554                 .final = safexcel_ahash_final,
1555                 .finup = safexcel_ahash_finup,
1556                 .digest = safexcel_sha512_digest,
1557                 .export = safexcel_ahash_export,
1558                 .import = safexcel_ahash_import,
1559                 .halg = {
1560                         .digestsize = SHA512_DIGEST_SIZE,
1561                         .statesize = sizeof(struct safexcel_ahash_export_state),
1562                         .base = {
1563                                 .cra_name = "sha512",
1564                                 .cra_driver_name = "safexcel-sha512",
1565                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1566                                 .cra_flags = CRYPTO_ALG_ASYNC |
1567                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1568                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1569                                 .cra_blocksize = SHA512_BLOCK_SIZE,
1570                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1571                                 .cra_init = safexcel_ahash_cra_init,
1572                                 .cra_exit = safexcel_ahash_cra_exit,
1573                                 .cra_module = THIS_MODULE,
1574                         },
1575                 },
1576         },
1577 };
1578
1579 static int safexcel_sha384_init(struct ahash_request *areq)
1580 {
1581         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1582         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1583
1584         memset(req, 0, sizeof(*req));
1585
1586         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1587         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1588         req->state_sz = SHA512_DIGEST_SIZE;
1589         req->digest_sz = SHA512_DIGEST_SIZE;
1590         req->block_sz = SHA512_BLOCK_SIZE;
1591
1592         return 0;
1593 }
1594
1595 static int safexcel_sha384_digest(struct ahash_request *areq)
1596 {
1597         int ret = safexcel_sha384_init(areq);
1598
1599         if (ret)
1600                 return ret;
1601
1602         return safexcel_ahash_finup(areq);
1603 }
1604
1605 struct safexcel_alg_template safexcel_alg_sha384 = {
1606         .type = SAFEXCEL_ALG_TYPE_AHASH,
1607         .algo_mask = SAFEXCEL_ALG_SHA2_512,
1608         .alg.ahash = {
1609                 .init = safexcel_sha384_init,
1610                 .update = safexcel_ahash_update,
1611                 .final = safexcel_ahash_final,
1612                 .finup = safexcel_ahash_finup,
1613                 .digest = safexcel_sha384_digest,
1614                 .export = safexcel_ahash_export,
1615                 .import = safexcel_ahash_import,
1616                 .halg = {
1617                         .digestsize = SHA384_DIGEST_SIZE,
1618                         .statesize = sizeof(struct safexcel_ahash_export_state),
1619                         .base = {
1620                                 .cra_name = "sha384",
1621                                 .cra_driver_name = "safexcel-sha384",
1622                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1623                                 .cra_flags = CRYPTO_ALG_ASYNC |
1624                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1625                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1626                                 .cra_blocksize = SHA384_BLOCK_SIZE,
1627                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1628                                 .cra_init = safexcel_ahash_cra_init,
1629                                 .cra_exit = safexcel_ahash_cra_exit,
1630                                 .cra_module = THIS_MODULE,
1631                         },
1632                 },
1633         },
1634 };
1635
1636 static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
1637                                        unsigned int keylen)
1638 {
1639         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1640                                         SHA512_DIGEST_SIZE);
1641 }
1642
1643 static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1644 {
1645         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1646         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1647
1648         memset(req, 0, sizeof(*req));
1649
1650         /* Start from ipad precompute */
1651         memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1652         /* Already processed the key^ipad part now! */
1653         req->len        = SHA512_BLOCK_SIZE;
1654         req->processed  = SHA512_BLOCK_SIZE;
1655
1656         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1657         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1658         req->state_sz = SHA512_DIGEST_SIZE;
1659         req->digest_sz = SHA512_DIGEST_SIZE;
1660         req->block_sz = SHA512_BLOCK_SIZE;
1661         req->hmac = true;
1662
1663         return 0;
1664 }
1665
1666 static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1667 {
1668         int ret = safexcel_hmac_sha512_init(areq);
1669
1670         if (ret)
1671                 return ret;
1672
1673         return safexcel_ahash_finup(areq);
1674 }
1675
1676 struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1677         .type = SAFEXCEL_ALG_TYPE_AHASH,
1678         .algo_mask = SAFEXCEL_ALG_SHA2_512,
1679         .alg.ahash = {
1680                 .init = safexcel_hmac_sha512_init,
1681                 .update = safexcel_ahash_update,
1682                 .final = safexcel_ahash_final,
1683                 .finup = safexcel_ahash_finup,
1684                 .digest = safexcel_hmac_sha512_digest,
1685                 .setkey = safexcel_hmac_sha512_setkey,
1686                 .export = safexcel_ahash_export,
1687                 .import = safexcel_ahash_import,
1688                 .halg = {
1689                         .digestsize = SHA512_DIGEST_SIZE,
1690                         .statesize = sizeof(struct safexcel_ahash_export_state),
1691                         .base = {
1692                                 .cra_name = "hmac(sha512)",
1693                                 .cra_driver_name = "safexcel-hmac-sha512",
1694                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1695                                 .cra_flags = CRYPTO_ALG_ASYNC |
1696                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1697                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1698                                 .cra_blocksize = SHA512_BLOCK_SIZE,
1699                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1700                                 .cra_init = safexcel_ahash_cra_init,
1701                                 .cra_exit = safexcel_ahash_cra_exit,
1702                                 .cra_module = THIS_MODULE,
1703                         },
1704                 },
1705         },
1706 };
1707
1708 static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1709                                        unsigned int keylen)
1710 {
1711         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1712                                         SHA512_DIGEST_SIZE);
1713 }
1714
1715 static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1716 {
1717         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1718         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1719
1720         memset(req, 0, sizeof(*req));
1721
1722         /* Start from ipad precompute */
1723         memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1724         /* Already processed the key^ipad part now! */
1725         req->len        = SHA512_BLOCK_SIZE;
1726         req->processed  = SHA512_BLOCK_SIZE;
1727
1728         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1729         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1730         req->state_sz = SHA512_DIGEST_SIZE;
1731         req->digest_sz = SHA512_DIGEST_SIZE;
1732         req->block_sz = SHA512_BLOCK_SIZE;
1733         req->hmac = true;
1734
1735         return 0;
1736 }
1737
1738 static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1739 {
1740         int ret = safexcel_hmac_sha384_init(areq);
1741
1742         if (ret)
1743                 return ret;
1744
1745         return safexcel_ahash_finup(areq);
1746 }
1747
1748 struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1749         .type = SAFEXCEL_ALG_TYPE_AHASH,
1750         .algo_mask = SAFEXCEL_ALG_SHA2_512,
1751         .alg.ahash = {
1752                 .init = safexcel_hmac_sha384_init,
1753                 .update = safexcel_ahash_update,
1754                 .final = safexcel_ahash_final,
1755                 .finup = safexcel_ahash_finup,
1756                 .digest = safexcel_hmac_sha384_digest,
1757                 .setkey = safexcel_hmac_sha384_setkey,
1758                 .export = safexcel_ahash_export,
1759                 .import = safexcel_ahash_import,
1760                 .halg = {
1761                         .digestsize = SHA384_DIGEST_SIZE,
1762                         .statesize = sizeof(struct safexcel_ahash_export_state),
1763                         .base = {
1764                                 .cra_name = "hmac(sha384)",
1765                                 .cra_driver_name = "safexcel-hmac-sha384",
1766                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1767                                 .cra_flags = CRYPTO_ALG_ASYNC |
1768                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1769                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1770                                 .cra_blocksize = SHA384_BLOCK_SIZE,
1771                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1772                                 .cra_init = safexcel_ahash_cra_init,
1773                                 .cra_exit = safexcel_ahash_cra_exit,
1774                                 .cra_module = THIS_MODULE,
1775                         },
1776                 },
1777         },
1778 };
1779
1780 static int safexcel_md5_init(struct ahash_request *areq)
1781 {
1782         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1783         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1784
1785         memset(req, 0, sizeof(*req));
1786
1787         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1788         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1789         req->state_sz = MD5_DIGEST_SIZE;
1790         req->digest_sz = MD5_DIGEST_SIZE;
1791         req->block_sz = MD5_HMAC_BLOCK_SIZE;
1792
1793         return 0;
1794 }
1795
1796 static int safexcel_md5_digest(struct ahash_request *areq)
1797 {
1798         int ret = safexcel_md5_init(areq);
1799
1800         if (ret)
1801                 return ret;
1802
1803         return safexcel_ahash_finup(areq);
1804 }
1805
1806 struct safexcel_alg_template safexcel_alg_md5 = {
1807         .type = SAFEXCEL_ALG_TYPE_AHASH,
1808         .algo_mask = SAFEXCEL_ALG_MD5,
1809         .alg.ahash = {
1810                 .init = safexcel_md5_init,
1811                 .update = safexcel_ahash_update,
1812                 .final = safexcel_ahash_final,
1813                 .finup = safexcel_ahash_finup,
1814                 .digest = safexcel_md5_digest,
1815                 .export = safexcel_ahash_export,
1816                 .import = safexcel_ahash_import,
1817                 .halg = {
1818                         .digestsize = MD5_DIGEST_SIZE,
1819                         .statesize = sizeof(struct safexcel_ahash_export_state),
1820                         .base = {
1821                                 .cra_name = "md5",
1822                                 .cra_driver_name = "safexcel-md5",
1823                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1824                                 .cra_flags = CRYPTO_ALG_ASYNC |
1825                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1826                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1827                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1828                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1829                                 .cra_init = safexcel_ahash_cra_init,
1830                                 .cra_exit = safexcel_ahash_cra_exit,
1831                                 .cra_module = THIS_MODULE,
1832                         },
1833                 },
1834         },
1835 };
1836
1837 static int safexcel_hmac_md5_init(struct ahash_request *areq)
1838 {
1839         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1840         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1841
1842         memset(req, 0, sizeof(*req));
1843
1844         /* Start from ipad precompute */
1845         memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
1846         /* Already processed the key^ipad part now! */
1847         req->len        = MD5_HMAC_BLOCK_SIZE;
1848         req->processed  = MD5_HMAC_BLOCK_SIZE;
1849
1850         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1851         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1852         req->state_sz = MD5_DIGEST_SIZE;
1853         req->digest_sz = MD5_DIGEST_SIZE;
1854         req->block_sz = MD5_HMAC_BLOCK_SIZE;
1855         req->len_is_le = true; /* MD5 is little endian! ... */
1856         req->hmac = true;
1857
1858         return 0;
1859 }
1860
1861 static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1862                                     unsigned int keylen)
1863 {
1864         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1865                                         MD5_DIGEST_SIZE);
1866 }
1867
1868 static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1869 {
1870         int ret = safexcel_hmac_md5_init(areq);
1871
1872         if (ret)
1873                 return ret;
1874
1875         return safexcel_ahash_finup(areq);
1876 }
1877
1878 struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1879         .type = SAFEXCEL_ALG_TYPE_AHASH,
1880         .algo_mask = SAFEXCEL_ALG_MD5,
1881         .alg.ahash = {
1882                 .init = safexcel_hmac_md5_init,
1883                 .update = safexcel_ahash_update,
1884                 .final = safexcel_ahash_final,
1885                 .finup = safexcel_ahash_finup,
1886                 .digest = safexcel_hmac_md5_digest,
1887                 .setkey = safexcel_hmac_md5_setkey,
1888                 .export = safexcel_ahash_export,
1889                 .import = safexcel_ahash_import,
1890                 .halg = {
1891                         .digestsize = MD5_DIGEST_SIZE,
1892                         .statesize = sizeof(struct safexcel_ahash_export_state),
1893                         .base = {
1894                                 .cra_name = "hmac(md5)",
1895                                 .cra_driver_name = "safexcel-hmac-md5",
1896                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1897                                 .cra_flags = CRYPTO_ALG_ASYNC |
1898                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1899                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1900                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1901                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1902                                 .cra_init = safexcel_ahash_cra_init,
1903                                 .cra_exit = safexcel_ahash_cra_exit,
1904                                 .cra_module = THIS_MODULE,
1905                         },
1906                 },
1907         },
1908 };
1909
1910 static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1911 {
1912         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1913         int ret = safexcel_ahash_cra_init(tfm);
1914
1915         /* Default 'key' is all zeroes */
1916         memset(&ctx->base.ipad, 0, sizeof(u32));
1917         return ret;
1918 }
1919
1920 static int safexcel_crc32_init(struct ahash_request *areq)
1921 {
1922         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1923         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1924
1925         memset(req, 0, sizeof(*req));
1926
1927         /*
              * Start from the loaded key, bit-inverted: with the default
              * all-zeroes key this is 0xffffffff, the standard CRC32 seed.
              */
1928         req->state[0]   = cpu_to_le32(~ctx->base.ipad.word[0]);
1929         /* Set processed to non-zero to enable invalidation detection */
1930         req->len        = sizeof(u32);
1931         req->processed  = sizeof(u32);
1932
1933         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1934         req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1935         req->state_sz = sizeof(u32);
1936         req->digest_sz = sizeof(u32);
1937         req->block_sz = sizeof(u32);
1938
1939         return 0;
1940 }
1941
1942 static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1943                                  unsigned int keylen)
1944 {
1945         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1946
1947         if (keylen != sizeof(u32))
1948                 return -EINVAL;
1949
1950         memcpy(&ctx->base.ipad, key, sizeof(u32));
1951         return 0;
1952 }
1953
1954 static int safexcel_crc32_digest(struct ahash_request *areq)
1955 {
1956         return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1957 }
1958
1959 struct safexcel_alg_template safexcel_alg_crc32 = {
1960         .type = SAFEXCEL_ALG_TYPE_AHASH,
1961         .algo_mask = 0,
1962         .alg.ahash = {
1963                 .init = safexcel_crc32_init,
1964                 .update = safexcel_ahash_update,
1965                 .final = safexcel_ahash_final,
1966                 .finup = safexcel_ahash_finup,
1967                 .digest = safexcel_crc32_digest,
1968                 .setkey = safexcel_crc32_setkey,
1969                 .export = safexcel_ahash_export,
1970                 .import = safexcel_ahash_import,
1971                 .halg = {
1972                         .digestsize = sizeof(u32),
1973                         .statesize = sizeof(struct safexcel_ahash_export_state),
1974                         .base = {
1975                                 .cra_name = "crc32",
1976                                 .cra_driver_name = "safexcel-crc32",
1977                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1978                                 .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1979                                              CRYPTO_ALG_ASYNC |
1980                                              CRYPTO_ALG_ALLOCATES_MEMORY |
1981                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
1982                                 .cra_blocksize = 1,
1983                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1984                                 .cra_init = safexcel_crc32_cra_init,
1985                                 .cra_exit = safexcel_ahash_cra_exit,
1986                                 .cra_module = THIS_MODULE,
1987                         },
1988                 },
1989         },
1990 };
1991
1992 static int safexcel_cbcmac_init(struct ahash_request *areq)
1993 {
1994         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1995         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1996
1997         memset(req, 0, sizeof(*req));
1998
1999         /* Start from loaded keys */
2000         memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
2001         /* Set processed to non-zero to enable invalidation detection */
2002         req->len        = AES_BLOCK_SIZE;
2003         req->processed  = AES_BLOCK_SIZE;
2004
2005         req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
2006         req->state_sz = ctx->key_sz;
2007         req->digest_sz = AES_BLOCK_SIZE;
2008         req->block_sz = AES_BLOCK_SIZE;
2009         req->xcbcmac  = true;
2010
2011         return 0;
2012 }
2013
2014 static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2015                                   unsigned int len)
2016 {
2017         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2018         struct crypto_aes_ctx aes;
2019         int ret, i;
2020
2021         ret = aes_expandkey(&aes, key, len);
2022         if (ret)
2023                 return ret;
2024
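             /*
              * Context layout (a sketch, inferred from the offsets used
              * here): two all-zero AES blocks first, presumably the K2/K3
              * subkey slots shared with XCBC but unused for plain CBC-MAC,
              * then the AES key itself, hence the 8-word offset below.
              */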
2025         memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
2026         for (i = 0; i < len / sizeof(u32); i++)
2027                 ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);
2028
2029         if (len == AES_KEYSIZE_192) {
2030                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2031                 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2032         } else if (len == AES_KEYSIZE_256) {
2033                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2034                 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2035         } else {
2036                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2037                 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2038         }
2039         ctx->cbcmac  = true;
2040
2041         memzero_explicit(&aes, sizeof(aes));
2042         return 0;
2043 }
2044
2045 static int safexcel_cbcmac_digest(struct ahash_request *areq)
2046 {
2047         return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2048 }
2049
2050 struct safexcel_alg_template safexcel_alg_cbcmac = {
2051         .type = SAFEXCEL_ALG_TYPE_AHASH,
2052         .algo_mask = 0,
2053         .alg.ahash = {
2054                 .init = safexcel_cbcmac_init,
2055                 .update = safexcel_ahash_update,
2056                 .final = safexcel_ahash_final,
2057                 .finup = safexcel_ahash_finup,
2058                 .digest = safexcel_cbcmac_digest,
2059                 .setkey = safexcel_cbcmac_setkey,
2060                 .export = safexcel_ahash_export,
2061                 .import = safexcel_ahash_import,
2062                 .halg = {
2063                         .digestsize = AES_BLOCK_SIZE,
2064                         .statesize = sizeof(struct safexcel_ahash_export_state),
2065                         .base = {
2066                                 .cra_name = "cbcmac(aes)",
2067                                 .cra_driver_name = "safexcel-cbcmac-aes",
2068                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2069                                 .cra_flags = CRYPTO_ALG_ASYNC |
2070                                              CRYPTO_ALG_ALLOCATES_MEMORY |
2071                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
2072                                 .cra_blocksize = 1,
2073                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2074                                 .cra_init = safexcel_ahash_cra_init,
2075                                 .cra_exit = safexcel_ahash_cra_exit,
2076                                 .cra_module = THIS_MODULE,
2077                         },
2078                 },
2079         },
2080 };
2081
2082 static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2083                                    unsigned int len)
2084 {
2085         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2086         struct crypto_aes_ctx aes;
2087         u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2088         int ret, i;
2089
2090         ret = aes_expandkey(&aes, key, len);
2091         if (ret)
2092                 return ret;
2093
2094         /*
              * Precompute the XCBC key material: per RFC 3566, K1, K2 and
              * K3 are the encryptions of 16 bytes of 0x01, 0x02 and 0x03
              * under the user key; they are stored here as K2, K3, K1.
              */
2095         crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2096         crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2097                                 CRYPTO_TFM_REQ_MASK);
2098         ret = crypto_cipher_setkey(ctx->kaes, key, len);
2099         if (ret)
2100                 return ret;
2101
2102         crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2103                 "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2104         crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2105                 "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2106         crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2107                 "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
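             /* Byte-swap each 32-bit word into the order the engine expects */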
2108         for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2109                 ctx->base.ipad.word[i] = swab(key_tmp[i]);
2110
2111         crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2112         crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2113                                 CRYPTO_TFM_REQ_MASK);
2114         ret = crypto_cipher_setkey(ctx->kaes,
2115                                    (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2116                                    AES_MIN_KEY_SIZE);
2117         if (ret)
2118                 return ret;
2119
2120         ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2121         ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2122         ctx->cbcmac = false;
2123
2124         memzero_explicit(&aes, sizeof(aes));
2125         return 0;
2126 }
2127
2128 static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2129 {
2130         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2131
2132         safexcel_ahash_cra_init(tfm);
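             /* SW AES cipher, used by the XCBC/CMAC setkey paths to derive subkeys */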
2133         ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2134         return PTR_ERR_OR_ZERO(ctx->kaes);
2135 }
2136
2137 static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2138 {
2139         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2140
2141         crypto_free_cipher(ctx->kaes);
2142         safexcel_ahash_cra_exit(tfm);
2143 }
2144
2145 struct safexcel_alg_template safexcel_alg_xcbcmac = {
2146         .type = SAFEXCEL_ALG_TYPE_AHASH,
2147         .algo_mask = 0,
2148         .alg.ahash = {
2149                 .init = safexcel_cbcmac_init,
2150                 .update = safexcel_ahash_update,
2151                 .final = safexcel_ahash_final,
2152                 .finup = safexcel_ahash_finup,
2153                 .digest = safexcel_cbcmac_digest,
2154                 .setkey = safexcel_xcbcmac_setkey,
2155                 .export = safexcel_ahash_export,
2156                 .import = safexcel_ahash_import,
2157                 .halg = {
2158                         .digestsize = AES_BLOCK_SIZE,
2159                         .statesize = sizeof(struct safexcel_ahash_export_state),
2160                         .base = {
2161                                 .cra_name = "xcbc(aes)",
2162                                 .cra_driver_name = "safexcel-xcbc-aes",
2163                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2164                                 .cra_flags = CRYPTO_ALG_ASYNC |
2165                                              CRYPTO_ALG_ALLOCATES_MEMORY |
2166                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
2167                                 .cra_blocksize = AES_BLOCK_SIZE,
2168                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2169                                 .cra_init = safexcel_xcbcmac_cra_init,
2170                                 .cra_exit = safexcel_xcbcmac_cra_exit,
2171                                 .cra_module = THIS_MODULE,
2172                         },
2173                 },
2174         },
2175 };
2176
2177 static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2178                                 unsigned int len)
2179 {
2180         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2181         struct crypto_aes_ctx aes;
2182         __be64 consts[4];
2183         u64 _const[2];
2184         u8 msb_mask, gfmask;
2185         int ret, i;
2186
2187         ret = aes_expandkey(&aes, key, len);
2188         if (ret)
2189                 return ret;
2190
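             /* Place the AES key behind the two subkey blocks computed below */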
2191         for (i = 0; i < len / sizeof(u32); i++)
2192                 ctx->base.ipad.word[i + 8] = swab(aes.key_enc[i]);
2193
2194         /* precompute the CMAC key material */
2195         crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2196         crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2197                                 CRYPTO_TFM_REQ_MASK);
2198         ret = crypto_cipher_setkey(ctx->kaes, key, len);
2199         if (ret)
2200                 return ret;
2201
2202         /* code below borrowed from crypto/cmac.c */
2203         /* encrypt the zero block */
2204         memset(consts, 0, AES_BLOCK_SIZE);
2205         crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2206
2207         gfmask = 0x87;
2208         _const[0] = be64_to_cpu(consts[1]);
2209         _const[1] = be64_to_cpu(consts[0]);
2210
2211         /*
              * gf(2^128) multiply zero-ciphertext with u and u^2: shift the
              * 128-bit value left by one bit, XORing in the reduction
              * polynomial 0x87 whenever the top bit falls off.
              */
2212         for (i = 0; i < 4; i += 2) {
2213                 msb_mask = ((s64)_const[1] >> 63) & gfmask;
2214                 _const[1] = (_const[1] << 1) | (_const[0] >> 63);
2215                 _const[0] = (_const[0] << 1) ^ msb_mask;
2216
2217                 consts[i + 0] = cpu_to_be64(_const[1]);
2218                 consts[i + 1] = cpu_to_be64(_const[0]);
2219         }
2220         /* end of code borrowed from crypto/cmac.c */
2221
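             /* The K1/K2 subkeys go into the first two context blocks */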
2222         for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2223                 ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);
2224
2225         if (len == AES_KEYSIZE_192) {
2226                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2227                 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2228         } else if (len == AES_KEYSIZE_256) {
2229                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2230                 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2231         } else {
2232                 ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2233                 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2234         }
2235         ctx->cbcmac = false;
2236
2237         memzero_explicit(&aes, sizeof(aes));
2238         return 0;
2239 }
2240
2241 struct safexcel_alg_template safexcel_alg_cmac = {
2242         .type = SAFEXCEL_ALG_TYPE_AHASH,
2243         .algo_mask = 0,
2244         .alg.ahash = {
2245                 .init = safexcel_cbcmac_init,
2246                 .update = safexcel_ahash_update,
2247                 .final = safexcel_ahash_final,
2248                 .finup = safexcel_ahash_finup,
2249                 .digest = safexcel_cbcmac_digest,
2250                 .setkey = safexcel_cmac_setkey,
2251                 .export = safexcel_ahash_export,
2252                 .import = safexcel_ahash_import,
2253                 .halg = {
2254                         .digestsize = AES_BLOCK_SIZE,
2255                         .statesize = sizeof(struct safexcel_ahash_export_state),
2256                         .base = {
2257                                 .cra_name = "cmac(aes)",
2258                                 .cra_driver_name = "safexcel-cmac-aes",
2259                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2260                                 .cra_flags = CRYPTO_ALG_ASYNC |
2261                                              CRYPTO_ALG_ALLOCATES_MEMORY |
2262                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
2263                                 .cra_blocksize = AES_BLOCK_SIZE,
2264                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2265                                 .cra_init = safexcel_xcbcmac_cra_init,
2266                                 .cra_exit = safexcel_xcbcmac_cra_exit,
2267                                 .cra_module = THIS_MODULE,
2268                         },
2269                 },
2270         },
2271 };
2272
2273 static int safexcel_sm3_init(struct ahash_request *areq)
2274 {
2275         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2276         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2277
2278         memset(req, 0, sizeof(*req));
2279
2280         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2281         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2282         req->state_sz = SM3_DIGEST_SIZE;
2283         req->digest_sz = SM3_DIGEST_SIZE;
2284         req->block_sz = SM3_BLOCK_SIZE;
2285
2286         return 0;
2287 }
2288
2289 static int safexcel_sm3_digest(struct ahash_request *areq)
2290 {
2291         int ret = safexcel_sm3_init(areq);
2292
2293         if (ret)
2294                 return ret;
2295
2296         return safexcel_ahash_finup(areq);
2297 }
2298
2299 struct safexcel_alg_template safexcel_alg_sm3 = {
2300         .type = SAFEXCEL_ALG_TYPE_AHASH,
2301         .algo_mask = SAFEXCEL_ALG_SM3,
2302         .alg.ahash = {
2303                 .init = safexcel_sm3_init,
2304                 .update = safexcel_ahash_update,
2305                 .final = safexcel_ahash_final,
2306                 .finup = safexcel_ahash_finup,
2307                 .digest = safexcel_sm3_digest,
2308                 .export = safexcel_ahash_export,
2309                 .import = safexcel_ahash_import,
2310                 .halg = {
2311                         .digestsize = SM3_DIGEST_SIZE,
2312                         .statesize = sizeof(struct safexcel_ahash_export_state),
2313                         .base = {
2314                                 .cra_name = "sm3",
2315                                 .cra_driver_name = "safexcel-sm3",
2316                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2317                                 .cra_flags = CRYPTO_ALG_ASYNC |
2318                                              CRYPTO_ALG_ALLOCATES_MEMORY |
2319                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
2320                                 .cra_blocksize = SM3_BLOCK_SIZE,
2321                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2322                                 .cra_init = safexcel_ahash_cra_init,
2323                                 .cra_exit = safexcel_ahash_cra_exit,
2324                                 .cra_module = THIS_MODULE,
2325                         },
2326                 },
2327         },
2328 };
2329
2330 static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2331                                     unsigned int keylen)
2332 {
2333         return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2334                                         SM3_DIGEST_SIZE);
2335 }
2336
2337 static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2338 {
2339         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2340         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2341
2342         memset(req, 0, sizeof(*req));
2343
2344         /* Start from ipad precompute */
2345         memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
2346         /* Already processed the key^ipad part now! */
2347         req->len        = SM3_BLOCK_SIZE;
2348         req->processed  = SM3_BLOCK_SIZE;
2349
2350         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2351         req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2352         req->state_sz = SM3_DIGEST_SIZE;
2353         req->digest_sz = SM3_DIGEST_SIZE;
2354         req->block_sz = SM3_BLOCK_SIZE;
2355         req->hmac = true;
2356
2357         return 0;
2358 }
2359
2360 static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2361 {
2362         int ret = safexcel_hmac_sm3_init(areq);
2363
2364         if (ret)
2365                 return ret;
2366
2367         return safexcel_ahash_finup(areq);
2368 }
2369
2370 struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2371         .type = SAFEXCEL_ALG_TYPE_AHASH,
2372         .algo_mask = SAFEXCEL_ALG_SM3,
2373         .alg.ahash = {
2374                 .init = safexcel_hmac_sm3_init,
2375                 .update = safexcel_ahash_update,
2376                 .final = safexcel_ahash_final,
2377                 .finup = safexcel_ahash_finup,
2378                 .digest = safexcel_hmac_sm3_digest,
2379                 .setkey = safexcel_hmac_sm3_setkey,
2380                 .export = safexcel_ahash_export,
2381                 .import = safexcel_ahash_import,
2382                 .halg = {
2383                         .digestsize = SM3_DIGEST_SIZE,
2384                         .statesize = sizeof(struct safexcel_ahash_export_state),
2385                         .base = {
2386                                 .cra_name = "hmac(sm3)",
2387                                 .cra_driver_name = "safexcel-hmac-sm3",
2388                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2389                                 .cra_flags = CRYPTO_ALG_ASYNC |
2390                                              CRYPTO_ALG_ALLOCATES_MEMORY |
2391                                              CRYPTO_ALG_KERN_DRIVER_ONLY,
2392                                 .cra_blocksize = SM3_BLOCK_SIZE,
2393                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2394                                 .cra_init = safexcel_ahash_cra_init,
2395                                 .cra_exit = safexcel_ahash_cra_exit,
2396                                 .cra_module = THIS_MODULE,
2397                         },
2398                 },
2399         },
2400 };
2401
2402 static int safexcel_sha3_224_init(struct ahash_request *areq)
2403 {
2404         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2405         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2406         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2407
2408         memset(req, 0, sizeof(*req));
2409
2410         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2411         req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2412         req->state_sz = SHA3_224_DIGEST_SIZE;
2413         req->digest_sz = SHA3_224_DIGEST_SIZE;
2414         req->block_sz = SHA3_224_BLOCK_SIZE;
2415         ctx->do_fallback = false;
2416         ctx->fb_init_done = false;
2417         return 0;
2418 }
2419
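     /*
      * Prepare the SW fallback lazily: mirror the request parameters onto
      * the sub-request and, on first use, (re)key it for HMAC and run its
      * init before forwarding any other operation.
      */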
2420 static int safexcel_sha3_fbcheck(struct ahash_request *req)
2421 {
2422         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2423         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2424         struct ahash_request *subreq = ahash_request_ctx(req);
2425         int ret = 0;
2426
2427         if (ctx->do_fallback) {
2428                 ahash_request_set_tfm(subreq, ctx->fback);
2429                 ahash_request_set_callback(subreq, req->base.flags,
2430                                            req->base.complete, req->base.data);
2431                 ahash_request_set_crypt(subreq, req->src, req->result,
2432                                         req->nbytes);
2433                 if (!ctx->fb_init_done) {
2434                         if (ctx->fb_do_setkey) {
2435                                 /*
                                      * Set the fallback cipher HMAC key,
                                      * reassembling the raw key from the
                                      * two halves stashed in the ipad and
                                      * opad buffers by the setkey path.
                                      */
2436                                 u8 key[SHA3_224_BLOCK_SIZE];
2437
2438                                 memcpy(key, &ctx->base.ipad,
2439                                        crypto_ahash_blocksize(ctx->fback) / 2);
2440                                 memcpy(key +
2441                                        crypto_ahash_blocksize(ctx->fback) / 2,
2442                                        &ctx->base.opad,
2443                                        crypto_ahash_blocksize(ctx->fback) / 2);
2444                                 ret = crypto_ahash_setkey(ctx->fback, key,
2445                                         crypto_ahash_blocksize(ctx->fback));
2446                                 memzero_explicit(key,
2447                                         crypto_ahash_blocksize(ctx->fback));
2448                                 ctx->fb_do_setkey = false;
2449                         }
2450                         ret = ret ?: crypto_ahash_init(subreq);
2451                         ctx->fb_init_done = true;
2452                 }
2453         }
2454         return ret;
2455 }
2456
2457 static int safexcel_sha3_update(struct ahash_request *req)
2458 {
2459         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2460         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2461         struct ahash_request *subreq = ahash_request_ctx(req);
2462
2463         ctx->do_fallback = true;
2464         return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2465 }
2466
2467 static int safexcel_sha3_final(struct ahash_request *req)
2468 {
2469         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2470         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2471         struct ahash_request *subreq = ahash_request_ctx(req);
2472
2473         ctx->do_fallback = true;
2474         return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2475 }
2476
2477 static int safexcel_sha3_finup(struct ahash_request *req)
2478 {
2479         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2480         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2481         struct ahash_request *subreq = ahash_request_ctx(req);
2482
2483         ctx->do_fallback |= !req->nbytes;
2484         if (ctx->do_fallback)
2485                 /* Update or export/import happened, or zero length: cannot use the HW */
2486                 return safexcel_sha3_fbcheck(req) ?:
2487                        crypto_ahash_finup(subreq);
2488         else
2489                 return safexcel_ahash_finup(req);
2490 }
2491
2492 static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2493 {
2494         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2495         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2496         struct ahash_request *subreq = ahash_request_ctx(req);
2497
2498         ctx->do_fallback = true;
2499         ctx->fb_init_done = false;
2500         return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2501 }
2502
2503 static int safexcel_sha3_224_digest(struct ahash_request *req)
2504 {
2505         if (req->nbytes)
2506                 return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2507
2508         /* HW cannot do zero length hash, use fallback instead */
2509         return safexcel_sha3_digest_fallback(req);
2510 }
2511
2512 static int safexcel_sha3_export(struct ahash_request *req, void *out)
2513 {
2514         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2515         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2516         struct ahash_request *subreq = ahash_request_ctx(req);
2517
2518         ctx->do_fallback = true;
2519         return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2520 }
2521
2522 static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2523 {
2524         struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2525         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2526         struct ahash_request *subreq = ahash_request_ctx(req);
2527
2528         ctx->do_fallback = true;
2529         return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2531 }
2532
2533 static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2534 {
2535         struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2536         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2537
2538         safexcel_ahash_cra_init(tfm);
2539
2540         /* Allocate fallback implementation */
2541         ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2542                                         CRYPTO_ALG_ASYNC |
2543                                         CRYPTO_ALG_NEED_FALLBACK);
2544         if (IS_ERR(ctx->fback))
2545                 return PTR_ERR(ctx->fback);
2546
2547         /* Update statesize from fallback algorithm! */
2548         crypto_hash_alg_common(ahash)->statesize =
2549                 crypto_ahash_statesize(ctx->fback);
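             /* The request ctx must fit both our own state and a fallback request */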
2550         crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
2551                                             sizeof(struct ahash_request) +
2552                                             crypto_ahash_reqsize(ctx->fback)));
2553         return 0;
2554 }
2555
2556 static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2557 {
2558         struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2559
2560         crypto_free_ahash(ctx->fback);
2561         safexcel_ahash_cra_exit(tfm);
2562 }
2563
2564 struct safexcel_alg_template safexcel_alg_sha3_224 = {
2565         .type = SAFEXCEL_ALG_TYPE_AHASH,
2566         .algo_mask = SAFEXCEL_ALG_SHA3,
2567         .alg.ahash = {
2568                 .init = safexcel_sha3_224_init,
2569                 .update = safexcel_sha3_update,
2570                 .final = safexcel_sha3_final,
2571                 .finup = safexcel_sha3_finup,
2572                 .digest = safexcel_sha3_224_digest,
2573                 .export = safexcel_sha3_export,
2574                 .import = safexcel_sha3_import,
2575                 .halg = {
2576                         .digestsize = SHA3_224_DIGEST_SIZE,
2577                         .statesize = sizeof(struct safexcel_ahash_export_state),
2578                         .base = {
2579                                 .cra_name = "sha3-224",
2580                                 .cra_driver_name = "safexcel-sha3-224",
2581                                 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2582                                 .cra_flags = CRYPTO_ALG_ASYNC |
2583                                              CRYPTO_ALG_KERN_DRIVER_ONLY |
2584                                              CRYPTO_ALG_NEED_FALLBACK,
2585                                 .cra_blocksize = SHA3_224_BLOCK_SIZE,
2586                                 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2587                                 .cra_init = safexcel_sha3_cra_init,
2588                                 .cra_exit = safexcel_sha3_cra_exit,
2589                                 .cra_module = THIS_MODULE,
2590                         },
2591                 },
2592         },
2593 };
2594
2595 static int safexcel_sha3_256_init(struct ahash_request *areq)
2596 {
2597         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2598         struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2599         struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2600
2601         memset(req, 0, sizeof(*req));
2602
2603         ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2604         req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2605         req->state_sz = SHA3_256_DIGEST_SIZE;
2606         req->digest_sz = SHA3_256_DIGEST_SIZE;
2607         req->block_sz = SHA3_256_BLOCK_SIZE;
2608         ctx->do_fallback = false;
2609         ctx->fb_init_done = false;
2610         return 0;
2611 }
2612
2613 static int safexcel_sha3_256_digest(struct ahash_request *req)
2614 {
2615         if (req->nbytes)
2616                 return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2617
2618         /* HW cannot do zero length hash, use fallback instead */
2619         return safexcel_sha3_digest_fallback(req);
2620 }

struct safexcel_alg_template safexcel_alg_sha3_256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_sha3_256_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_sha3_256_digest,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha3-256",
                                .cra_driver_name = "safexcel-sha3-256",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_sha3_cra_init,
                                .cra_exit = safexcel_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
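
/*
 * A minimal sketch (never compiled) of how a kernel consumer could drive
 * the "sha3-256" ahash registered above through the generic crypto API.
 * The function name is hypothetical; only standard crypto API calls are
 * used, and the data buffer is assumed to be lowmem so it can be mapped
 * with sg_init_one().
 */
#if 0
#include <crypto/hash.h>
#include <linux/scatterlist.h>

static int example_sha3_256_digest(const void *data, unsigned int len,
                                   u8 out[SHA3_256_DIGEST_SIZE])
{
        struct crypto_ahash *tfm;
        struct ahash_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        int ret;

        tfm = crypto_alloc_ahash("sha3-256", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        req = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                crypto_free_ahash(tfm);
                return -ENOMEM;
        }

        sg_init_one(&sg, data, len);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   crypto_req_done, &wait);
        ahash_request_set_crypt(req, &sg, out, len);

        /* CRYPTO_ALG_ASYNC drivers complete via the callback; wait for it */
        ret = crypto_wait_req(crypto_ahash_digest(req), &wait);

        ahash_request_free(req);
        crypto_free_ahash(tfm);
        return ret;
}
#endif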

static int safexcel_sha3_384_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
        req->state_sz = SHA3_384_DIGEST_SIZE;
        req->digest_sz = SHA3_384_DIGEST_SIZE;
        req->block_sz = SHA3_384_BLOCK_SIZE;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_sha3_384_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);

        /* HW cannot do zero length hash, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_384 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_sha3_384_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_sha3_384_digest,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_384_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha3-384",
                                .cra_driver_name = "safexcel-sha3-384",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_384_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_sha3_cra_init,
                                .cra_exit = safexcel_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha3_512_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
        req->state_sz = SHA3_512_DIGEST_SIZE;
        req->digest_sz = SHA3_512_DIGEST_SIZE;
        req->block_sz = SHA3_512_BLOCK_SIZE;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_sha3_512_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);

        /* HW cannot do zero length hash, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_512 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_sha3_512_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_sha3_512_digest,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_512_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha3-512",
                                .cra_driver_name = "safexcel-sha3-512",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_512_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_sha3_cra_init,
                                .cra_exit = safexcel_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = safexcel_sha3_cra_init(tfm);
        if (ret)
                return ret;

        /* Allocate precalc basic digest implementation */
        ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(ctx->shpre))
                return PTR_ERR(ctx->shpre);

        ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
                              crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
        if (!ctx->shdesc) {
                crypto_free_shash(ctx->shpre);
                return -ENOMEM;
        }
        ctx->shdesc->tfm = ctx->shpre;
        return 0;
}
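
/*
 * The kmalloc() above sizes a single buffer for the struct shash_desc
 * header plus the transform's operational state (crypto_shash_descsize()),
 * the same layout SHASH_DESC_ON_STACK() builds on the stack; it is heap
 * allocated here because the descriptor must live for the whole lifetime
 * of the tfm rather than one call.
 */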

static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_ahash(ctx->fback);
        crypto_free_shash(ctx->shpre);
        kfree(ctx->shdesc);
        safexcel_ahash_cra_exit(tfm);
}

static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
                                     unsigned int keylen)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        int ret = 0;

        if (keylen > crypto_ahash_blocksize(tfm)) {
                /*
                 * If the key is larger than the blocksize, then hash it
                 * first using the preallocated basic digest (shash)
                 * implementation
                 */
                ret = crypto_shash_digest(ctx->shdesc, key, keylen,
                                          ctx->base.ipad.byte);
                keylen = crypto_shash_digestsize(ctx->shpre);

                /*
                 * If the digest is larger than half the blocksize, we need to
                 * move the rest to opad due to the way our HMAC infra works.
                 */
                if (keylen > crypto_ahash_blocksize(tfm) / 2)
                        /* Buffers overlap, need to use memmove instead of memcpy! */
                        memmove(&ctx->base.opad,
                                ctx->base.ipad.byte +
                                        crypto_ahash_blocksize(tfm) / 2,
                                keylen - crypto_ahash_blocksize(tfm) / 2);
        } else {
                /*
                 * Copy the key to our ipad & opad buffers
                 * Note that ipad and opad each contain one half of the key,
                 * to match the existing HMAC driver infrastructure.
                 */
                if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
                        memcpy(&ctx->base.ipad, key, keylen);
                } else {
                        memcpy(&ctx->base.ipad, key,
                               crypto_ahash_blocksize(tfm) / 2);
                        memcpy(&ctx->base.opad,
                               key + crypto_ahash_blocksize(tfm) / 2,
                               keylen - crypto_ahash_blocksize(tfm) / 2);
                }
        }

        /* Pad key with zeroes */
        if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
                memset(ctx->base.ipad.byte + keylen, 0,
                       crypto_ahash_blocksize(tfm) / 2 - keylen);
                memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
        } else {
                memset(ctx->base.opad.byte + keylen -
                       crypto_ahash_blocksize(tfm) / 2, 0,
                       crypto_ahash_blocksize(tfm) - keylen);
        }

        /* If doing fallback, still need to set the new key! */
        ctx->fb_do_setkey = true;
        return ret;
}
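
/*
 * Worked example of the split above for hmac(sha3-512) (block size 72,
 * half-block 36): a 100-byte key exceeds the block size, so it is first
 * hashed down to the 64-byte SHA3-512 digest. Bytes 0..35 of that digest
 * stay in ipad, bytes 36..63 are memmove'd to opad[0..27], and opad bytes
 * 28..35 (72 - 64 = 8 bytes) are zero-padded, leaving exactly one
 * half-block of key material in each buffer.
 */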

static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len        = SHA3_224_BLOCK_SIZE;
        req->processed  = SHA3_224_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_224_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_224_DIGEST_SIZE;
        req->block_sz = SHA3_224_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}
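
/*
 * Presetting len == processed == one block makes the length accounting
 * cover the implicit ipad block an HMAC consumes before any message data,
 * so the engine's keyed digest starts at the correct offset; the same
 * pattern repeats for the other HMAC-SHA3 variants below.
 */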

static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_224_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_224_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_224_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-224)",
                                .cra_driver_name = "safexcel-hmac-sha3-224",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_224_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len        = SHA3_256_BLOCK_SIZE;
        req->processed  = SHA3_256_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_256_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_256_DIGEST_SIZE;
        req->block_sz = SHA3_256_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_256_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_256_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_256_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-256)",
                                .cra_driver_name = "safexcel-hmac-sha3-256",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_256_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len        = SHA3_384_BLOCK_SIZE;
        req->processed  = SHA3_384_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_384_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_384_DIGEST_SIZE;
        req->block_sz = SHA3_384_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_384_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_384_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_384_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_384_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-384)",
                                .cra_driver_name = "safexcel-hmac-sha3-384",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_384_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_384_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len        = SHA3_512_BLOCK_SIZE;
        req->processed  = SHA3_512_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_512_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_512_DIGEST_SIZE;
        req->block_sz = SHA3_512_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_512_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_512_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_512_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_512_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-512)",
                                .cra_driver_name = "safexcel-hmac-sha3-512",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_512_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_512_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
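
/*
 * A minimal sketch (never compiled) of keyed use: requesting the
 * "hmac(sha3-256)" template registered above and producing one MAC. The
 * function name is hypothetical, only standard crypto API calls are used,
 * and the data buffer is assumed to be lowmem. With this driver selected,
 * the setkey call below ends up in safexcel_hmac_sha3_setkey().
 */
#if 0
#include <crypto/hash.h>
#include <linux/scatterlist.h>

static int example_hmac_sha3_256(const u8 *key, unsigned int keylen,
                                 const void *data, unsigned int len,
                                 u8 mac[SHA3_256_DIGEST_SIZE])
{
        struct crypto_ahash *tfm;
        struct ahash_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        int ret;

        tfm = crypto_alloc_ahash("hmac(sha3-256)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* Splits the key over the ipad/opad half-blocks as described above */
        ret = crypto_ahash_setkey(tfm, key, keylen);
        if (ret)
                goto out;

        req = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                ret = -ENOMEM;
                goto out;
        }

        sg_init_one(&sg, data, len);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   crypto_req_done, &wait);
        ahash_request_set_crypt(req, &sg, mac, len);
        ret = crypto_wait_req(crypto_ahash_digest(req), &wait);

        ahash_request_free(req);
out:
        crypto_free_ahash(tfm);
        return ret;
}
#endif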