// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This file implements the Linux crypto algorithms.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha1.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

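/*
 * Fill in SA command word 0: hash/IV save and load behaviour, header
 * processing, hash and cipher algorithm selection, padding, opcode
 * group, opcode and packet direction.
 */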
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
                                     u32 save_iv, u32 ld_h, u32 ld_iv,
                                     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
                                     u32 op_grp, u32 op, u32 dir)
{
        sa->sa_command_0.w = 0;
        sa->sa_command_0.bf.save_hash_state = save_h;
        sa->sa_command_0.bf.save_iv = save_iv;
        sa->sa_command_0.bf.load_hash_state = ld_h;
        sa->sa_command_0.bf.load_iv = ld_iv;
        sa->sa_command_0.bf.hdr_proc = hdr_proc;
        sa->sa_command_0.bf.hash_alg = h;
        sa->sa_command_0.bf.cipher_alg = c;
        sa->sa_command_0.bf.pad_type = pad_type & 3;
        sa->sa_command_0.bf.extend_pad = pad_type >> 2;
        sa->sa_command_0.bf.op_group = op_grp;
        sa->sa_command_0.bf.opcode = op;
        sa->sa_command_0.bf.dir = dir;
}

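/*
 * Fill in SA command word 1: crypto mode bits, HMAC muting, feedback
 * mode, sequence-number handling and the copy pad/payload/header flags.
 */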
static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
                                     u32 hmac_mc, u32 cfb, u32 esn,
                                     u32 sn_mask, u32 mute, u32 cp_pad,
                                     u32 cp_pay, u32 cp_hdr)
{
        sa->sa_command_1.w = 0;
        sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
        sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
        sa->sa_command_1.bf.feedback_mode = cfb;
        sa->sa_command_1.bf.sa_rev = 1;
        sa->sa_command_1.bf.hmac_muting = hmac_mc;
        sa->sa_command_1.bf.extended_seq_num = esn;
        sa->sa_command_1.bf.seq_num_mask = sn_mask;
        sa->sa_command_1.bf.mutable_bit_proc = mute;
        sa->sa_command_1.bf.copy_pad = cp_pad;
        sa->sa_command_1.bf.copy_payload = cp_pay;
        sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

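/*
 * Common skcipher handler: optionally reject requests that are not a
 * multiple of the AES block size, convert the IV to little-endian words
 * and hand the request to the packet descriptor builder.
 */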
static inline int crypto4xx_crypt(struct skcipher_request *req,
                                  const unsigned int ivlen, bool decrypt,
                                  bool check_blocksize)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        __le32 iv[AES_IV_SIZE];

        if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
                return -EINVAL;

        if (ivlen)
                crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
                ctx->sa_len, 0, NULL);
}

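/*
 * Thin wrappers around crypto4xx_crypt() that select the IV length,
 * the direction and whether the block-size check applies.
 */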
int crypto4xx_encrypt_noiv_block(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, 0, false, true);
}

int crypto4xx_encrypt_iv_stream(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, AES_IV_SIZE, false, false);
}

int crypto4xx_decrypt_noiv_block(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, 0, true, true);
}

int crypto4xx_decrypt_iv_stream(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, AES_IV_SIZE, true, false);
}

int crypto4xx_encrypt_iv_block(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, AES_IV_SIZE, false, true);
}

int crypto4xx_decrypt_iv_block(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, AES_IV_SIZE, true, true);
}

/*
 * AES Functions
 */
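/*
 * Program the inbound and outbound SAs for an AES skcipher: validate the
 * key length, size the SA for the key, set the command words for the
 * requested mode and copy the key into the SA in little-endian form.
 */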
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
                                const u8 *key,
                                unsigned int keylen,
                                unsigned char cm,
                                u8 fb)
{
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        struct dynamic_sa_ctl *sa;
        int    rc;

        if (keylen != AES_KEYSIZE_256 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_128)
                return -EINVAL;

        /* Create SA */
        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen - 16) / 4);
        if (rc)
                return rc;

        /* Setup SA */
        sa = ctx->sa_in;

        set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
                                 SA_NOT_SAVE_IV : SA_SAVE_IV),
                                 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
                                 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
                                 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
                                 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
                                 DIR_INBOUND);

        set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
                                 fb, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);
        crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
                                 key, keylen);
        sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
        sa->sa_command_1.bf.key_len = keylen >> 3;

        memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
        sa = ctx->sa_out;
        sa->sa_command_0.bf.dir = DIR_OUTBOUND;
        /*
         * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT;
         * it's the DIR_(IN|OUT)BOUND that matters.
         */
        sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

        return 0;
}

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
                                    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
                                    CRYPTO_FEEDBACK_MODE_NO_FB);
}

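/*
 * RFC 3686 keys carry a trailing 4-byte nonce; strip it off for the AES
 * key schedule and keep it for building the per-request IV.
 */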
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        int rc;

        rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
                CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
        if (rc)
                return rc;

        ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
                                                 CTR_RFC3686_NONCE_SIZE]);

        return 0;
}

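/*
 * RFC 3686 CTR: the counter block handed to the engine is the 4-byte
 * nonce saved at setkey time, the 8-byte per-request IV and an initial
 * 32-bit block counter of 1.
 */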
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        __le32 iv[AES_IV_SIZE / 4] = {
                ctx->iv_nonce,
                cpu_to_le32p((u32 *) req->iv),
                cpu_to_le32p((u32 *) (req->iv + 4)),
                cpu_to_le32(1) };

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  req->cryptlen, iv, AES_IV_SIZE,
                                  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        __le32 iv[AES_IV_SIZE / 4] = {
                ctx->iv_nonce,
                cpu_to_le32p((u32 *) req->iv),
                cpu_to_le32p((u32 *) (req->iv + 4)),
                cpu_to_le32(1) };

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  req->cryptlen, iv, AES_IV_SIZE,
                                  ctx->sa_out, ctx->sa_len, 0, NULL);
}

static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        size_t iv_len = crypto_skcipher_ivsize(cipher);
        unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
        unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
                        AES_BLOCK_SIZE;

        /*
         * The hardware uses only the last 32 bits as the counter while the
         * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
         * the whole IV is a counter.  So fall back if the counter is going to
         * overflow.
         */
        if (counter + nblks < counter) {
                SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
                int ret;

                skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
                skcipher_request_set_callback(subreq, req->base.flags,
                        NULL, NULL);
                skcipher_request_set_crypt(subreq, req->src, req->dst,
                        req->cryptlen, req->iv);
                ret = encrypt ? crypto_skcipher_encrypt(subreq)
                        : crypto_skcipher_decrypt(subreq);
                skcipher_request_zero(subreq);
                return ret;
        }

        return encrypt ? crypto4xx_encrypt_iv_stream(req)
                       : crypto4xx_decrypt_iv_stream(req);
}

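/* Propagate the request flags to the software fallback and set its key. */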
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
                                       struct crypto_skcipher *cipher,
                                       const u8 *key,
                                       unsigned int keylen)
{
        crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
                                    CRYPTO_TFM_REQ_MASK);
        crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
                crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
        return crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        int rc;

        rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;

        return crypto4xx_setkey_aes(cipher, key, keylen,
                CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
        return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
        return crypto4xx_ctr_crypt(req, false);
}

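/*
 * Decide whether an AEAD request has to be handled by the software
 * fallback because it violates one of the engine's constraints.
 */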
static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
                                                unsigned int len,
                                                bool is_ccm, bool decrypt)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);

        /* authsize has to be a multiple of 4 */
        if (aead->authsize & 3)
                return true;

        /*
         * hardware does not handle cases where plaintext
         * is less than a block.
         */
        if (len < AES_BLOCK_SIZE)
                return true;

        /* assoc len needs to be a multiple of 4 and <= 1020 */
        if (req->assoclen & 0x3 || req->assoclen > 1020)
                return true;

        /* CCM supports only counter field length of 2 and 4 bytes */
        if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
                return true;

        return false;
}

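/* Forward the request unchanged to the software AEAD fallback. */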
static int crypto4xx_aead_fallback(struct aead_request *req,
        struct crypto4xx_ctx *ctx, bool do_decrypt)
{
        struct aead_request *subreq = aead_request_ctx(req);

        aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
        aead_request_set_callback(subreq, req->base.flags,
                                  req->base.complete, req->base.data);
        aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
                               req->iv);
        aead_request_set_ad(subreq, req->assoclen);
        return do_decrypt ? crypto_aead_decrypt(subreq) :
                            crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
                                         struct crypto_aead *cipher,
                                         const u8 *key,
                                         unsigned int keylen)
{
        crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
        crypto_aead_set_flags(ctx->sw_cipher.aead,
                crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
        return crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
}

/*
 * AES-CCM Functions
 */

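/*
 * CCM setkey: program both the inbound and outbound SAs for CTR-mode
 * encryption combined with a CBC-MAC over the data, sharing one AES key.
 */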
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
        struct dynamic_sa_ctl *sa;
        int rc = 0;

        rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;

        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
        if (rc)
                return rc;

        /* Setup SA */
        sa = (struct dynamic_sa_ctl *) ctx->sa_in;
        sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

        set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
                                 SA_CIPHER_ALG_AES,
                                 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
                                 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

        set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);

        sa->sa_command_1.bf.key_len = keylen >> 3;

        crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

        memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
        sa = (struct dynamic_sa_ctl *) ctx->sa_out;

        set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
                                 SA_CIPHER_ALG_AES,
                                 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
                                 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

        set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_COPY_PAD, SA_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);

        sa->sa_command_1.bf.key_len = keylen >> 3;
        return 0;
}

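/*
 * CCM request path: the direction-specific SA is copied to a stack buffer
 * so the digest length (and the counter mode when req->iv[0] == 1) can be
 * patched per request, and the initial counter block is built from req->iv.
 */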
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
        struct crypto4xx_ctx *ctx  = crypto_tfm_ctx(req->base.tfm);
        struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        __le32 iv[16];
        u32 tmp_sa[SA_AES128_CCM_LEN + 4];
        struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
        unsigned int len = req->cryptlen;

        if (decrypt)
                len -= crypto_aead_authsize(aead);

        if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
                return crypto4xx_aead_fallback(req, ctx, decrypt);

        memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
        sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

        if (req->iv[0] == 1) {
                /* CRYPTO_MODE_AES_ICM */
                sa->sa_command_1.bf.crypto_mode9_8 = 1;
        }

        iv[3] = cpu_to_le32(0);
        crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  len, iv, sizeof(iv),
                                  sa, ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
                               unsigned int authsize)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

        return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/*
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
        switch (keylen) {
        case 16:
        case 24:
        case 32:
                return 0;
        default:
                return -EINVAL;
        }
}

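/*
 * Compute the GHASH subkey H = AES-ENC(key, 0^128) in software; the
 * caller stores it in the SA's inner digest field.
 */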
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
                                             unsigned int keylen)
{
        struct crypto_aes_ctx ctx;
        uint8_t src[16] = { 0 };
        int rc;

        rc = aes_expandkey(&ctx, key, keylen);
        if (rc) {
                pr_err("aes_expandkey() failed: %d\n", rc);
                return rc;
        }

        aes_encrypt(&ctx, src, src);
        crypto4xx_memcpy_to_le32(hash_start, src, 16);
        memzero_explicit(&ctx, sizeof(ctx));
        return 0;
}

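/*
 * GCM setkey: program the SAs for CTR encryption with GHASH
 * authentication and derive the GHASH subkey in software.
 */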
int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
                             const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
        struct dynamic_sa_ctl *sa;
        int    rc = 0;

        if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0)
                return -EINVAL;

        rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;

        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
        if (rc)
                return rc;

        sa  = (struct dynamic_sa_ctl *) ctx->sa_in;

        sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
        set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
                                 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
                                 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
                                 DIR_INBOUND);
        set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_ON, SA_MC_DISABLE,
                                 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);

        sa->sa_command_1.bf.key_len = keylen >> 3;

        crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
                                 key, keylen);

        rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
                key, keylen);
        if (rc) {
                pr_err("GCM hash key setting failed = %d\n", rc);
                goto err;
        }

        memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
        sa = (struct dynamic_sa_ctl *) ctx->sa_out;
        sa->sa_command_0.bf.dir = DIR_OUTBOUND;
        sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

        return 0;
err:
        crypto4xx_free_sa(ctx);
        return rc;
}

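/*
 * GCM request path: build the 128-bit counter block from the 96-bit IV
 * with the 32-bit block counter preset to 1.
 */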
static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
                                          bool decrypt)
{
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
        __le32 iv[4];
        unsigned int len = req->cryptlen;

        if (decrypt)
                len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

        if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
                return crypto4xx_aead_fallback(req, ctx, decrypt);

        crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
        iv[3] = cpu_to_le32(1);

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  len, iv, sizeof(iv),
                                  decrypt ? ctx->sa_in : ctx->sa_out,
                                  ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_gcm(req, true);
}

/*
 * HASH SHA1 Functions
 */
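/*
 * Common ahash setup: allocate an SA sized for the hash, program it for
 * an inbound hash-only operation and clear the digest state.
 */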
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
                                   unsigned int sa_len,
                                   unsigned char ha,
                                   unsigned char hm)
{
        struct crypto_alg *alg = tfm->__crt_alg;
        struct crypto4xx_alg *my_alg;
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
        struct dynamic_sa_hash160 *sa;
        int rc;

        my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
                              alg.u.hash);
        ctx->dev   = my_alg->dev;

        /* Create SA */
        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, sa_len);
        if (rc)
                return rc;

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct crypto4xx_ctx));
        sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
        set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
                                 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
                                 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
                                 SA_OPCODE_HASH, DIR_INBOUND);
        set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);
        /* Need to zero hash digest in SA */
        memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
        memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

        return 0;
}

int crypto4xx_hash_init(struct ahash_request *req)
{
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        int ds;
        struct dynamic_sa_ctl *sa;

        sa = ctx->sa_in;
        ds = crypto_ahash_digestsize(
                        __crypto_ahash_cast(req->base.tfm));
        sa->sa_command_0.bf.digest_len = ds >> 2;
        sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

        return 0;
}

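/*
 * The engine writes the digest directly, so update and digest both submit
 * the data with req->result as the destination scatterlist and final is a
 * no-op.
 */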
int crypto4xx_hash_update(struct ahash_request *req)
{
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct scatterlist dst;
        unsigned int ds = crypto_ahash_digestsize(ahash);

        sg_init_one(&dst, req->result, ds);

        return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
                                  req->nbytes, NULL, 0, ctx->sa_in,
                                  ctx->sa_len, 0, NULL);
}

int crypto4xx_hash_final(struct ahash_request *req)
{
        return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct scatterlist dst;
        unsigned int ds = crypto_ahash_digestsize(ahash);

        sg_init_one(&dst, req->result, ds);

        return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
                                  req->nbytes, NULL, 0, ctx->sa_in,
                                  ctx->sa_len, 0, NULL);
}

/*
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
        return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
                                       SA_HASH_MODE_HASH);
}