1 // SPDX-License-Identifier: GPL-2.0-or-later
3 * talitos - Freescale Integrated Security Engine (SEC) device driver
5 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
7 * Scatterlist Crypto API glue code copied from files with the following:
8 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
10 * Crypto algorithm registration code copied from hifn driver:
11 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
12 * All rights reserved.
15 #include <linux/kernel.h>
16 #include <linux/module.h>
17 #include <linux/mod_devicetable.h>
18 #include <linux/device.h>
19 #include <linux/interrupt.h>
20 #include <linux/crypto.h>
21 #include <linux/hw_random.h>
22 #include <linux/of_address.h>
23 #include <linux/of_irq.h>
24 #include <linux/of_platform.h>
25 #include <linux/dma-mapping.h>
27 #include <linux/spinlock.h>
28 #include <linux/rtnetlink.h>
29 #include <linux/slab.h>
31 #include <crypto/algapi.h>
32 #include <crypto/aes.h>
33 #include <crypto/internal/des.h>
34 #include <crypto/sha.h>
35 #include <crypto/md5.h>
36 #include <crypto/internal/aead.h>
37 #include <crypto/authenc.h>
38 #include <crypto/internal/skcipher.h>
39 #include <crypto/hash.h>
40 #include <crypto/internal/hash.h>
41 #include <crypto/scatterwalk.h>
45 static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
46 unsigned int len, bool is_sec1)
48 ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
50 ptr->len1 = cpu_to_be16(len);
52 ptr->len = cpu_to_be16(len);
53 ptr->eptr = upper_32_bits(dma_addr);
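/*
 * Note: SEC1 and SEC2+ use different talitos_ptr layouts, as seen above.
 * On SEC1 the length lives in len1 and there is no extended-address byte,
 * so only the low 32 bits of the DMA address are programmed; on SEC2+ the
 * length goes in len and the upper address bits go in eptr (36-bit
 * addressing). The is_sec1 flag selects between the two encodings.
 */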
57 static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
58 struct talitos_ptr *src_ptr, bool is_sec1)
60 dst_ptr->ptr = src_ptr->ptr;
62 dst_ptr->len1 = src_ptr->len1;
64 dst_ptr->len = src_ptr->len;
65 dst_ptr->eptr = src_ptr->eptr;
69 static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
73 return be16_to_cpu(ptr->len1);
75 return be16_to_cpu(ptr->len);
78 static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
85 static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
92 * map virtual single (contiguous) pointer to h/w descriptor pointer
94 static void __map_single_talitos_ptr(struct device *dev,
95 struct talitos_ptr *ptr,
96 unsigned int len, void *data,
97 enum dma_data_direction dir,
100 dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
101 struct talitos_private *priv = dev_get_drvdata(dev);
102 bool is_sec1 = has_ftr_sec1(priv);
104 to_talitos_ptr(ptr, dma_addr, len, is_sec1);
107 static void map_single_talitos_ptr(struct device *dev,
108 struct talitos_ptr *ptr,
109 unsigned int len, void *data,
110 enum dma_data_direction dir)
112 __map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
115 static void map_single_talitos_ptr_nosync(struct device *dev,
116 struct talitos_ptr *ptr,
117 unsigned int len, void *data,
118 enum dma_data_direction dir)
120 __map_single_talitos_ptr(dev, ptr, len, data, dir,
121 DMA_ATTR_SKIP_CPU_SYNC);
125 * unmap bus single (contiguous) h/w descriptor pointer
127 static void unmap_single_talitos_ptr(struct device *dev,
128 struct talitos_ptr *ptr,
129 enum dma_data_direction dir)
131 struct talitos_private *priv = dev_get_drvdata(dev);
132 bool is_sec1 = has_ftr_sec1(priv);
134 dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
135 from_talitos_ptr_len(ptr, is_sec1), dir);
138 static int reset_channel(struct device *dev, int ch)
140 struct talitos_private *priv = dev_get_drvdata(dev);
141 unsigned int timeout = TALITOS_TIMEOUT;
142 bool is_sec1 = has_ftr_sec1(priv);
145 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
146 TALITOS1_CCCR_LO_RESET);
148 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
149 TALITOS1_CCCR_LO_RESET) && --timeout)
152 setbits32(priv->chan[ch].reg + TALITOS_CCCR,
153 TALITOS2_CCCR_RESET);
155 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
156 TALITOS2_CCCR_RESET) && --timeout)
161 dev_err(dev, "failed to reset channel %d\n", ch);
165 /* set 36-bit addressing, done writeback enable and done IRQ enable */
166 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
167 TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
168 /* enable chaining descriptors */
170 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
173 /* and ICCR writeback, if available */
174 if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
175 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
176 TALITOS_CCCR_LO_IWSE);
181 static int reset_device(struct device *dev)
183 struct talitos_private *priv = dev_get_drvdata(dev);
184 unsigned int timeout = TALITOS_TIMEOUT;
185 bool is_sec1 = has_ftr_sec1(priv);
186 u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;
188 setbits32(priv->reg + TALITOS_MCR, mcr);
190 while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
195 mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
196 setbits32(priv->reg + TALITOS_MCR, mcr);
200 dev_err(dev, "failed to reset device\n");
208 * Reset and initialize the device
210 static int init_device(struct device *dev)
212 struct talitos_private *priv = dev_get_drvdata(dev);
214 bool is_sec1 = has_ftr_sec1(priv);
218 * errata documentation: warning: certain SEC interrupts
219 * are not fully cleared by writing the MCR:SWR bit,
220 * set the bit twice to completely reset
222 err = reset_device(dev);
226 err = reset_device(dev);
231 for (ch = 0; ch < priv->num_channels; ch++) {
232 err = reset_channel(dev, ch);
237 /* enable channel done and error interrupts */
239 clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
240 clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
241 /* disable parity error check in DEU (test vectors trip spurious parity errors) */
242 setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
244 setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
245 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
248 /* disable integrity check error interrupts (use writeback instead) */
249 if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
250 setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
251 TALITOS_MDEUICR_LO_ICE);
257 * talitos_submit - submits a descriptor to the device for processing
258 * @dev: the SEC device to be used
259 * @ch: the SEC device channel to be used
260 * @desc: the descriptor to be processed by the device
261 * @callback: whom to call when processing is complete
262 * @context: a handle for use by caller (optional)
264 * desc must contain valid dma-mapped (bus physical) address pointers.
265 * callback must check err and feedback in descriptor header
266 * for device processing status.
268 static int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
269 void (*callback)(struct device *dev,
270 struct talitos_desc *desc,
271 void *context, int error),
274 struct talitos_private *priv = dev_get_drvdata(dev);
275 struct talitos_request *request;
278 bool is_sec1 = has_ftr_sec1(priv);
280 spin_lock_irqsave(&priv->chan[ch].head_lock, flags);
282 if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
283 /* h/w fifo is full */
284 spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
288 head = priv->chan[ch].head;
289 request = &priv->chan[ch].fifo[head];
291 /* map descriptor and save caller data */
293 desc->hdr1 = desc->hdr;
294 request->dma_desc = dma_map_single(dev, &desc->hdr1,
298 request->dma_desc = dma_map_single(dev, desc,
302 request->callback = callback;
303 request->context = context;
305 /* increment fifo head */
306 priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);
309 request->desc = desc;
313 out_be32(priv->chan[ch].reg + TALITOS_FF,
314 upper_32_bits(request->dma_desc));
315 out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
316 lower_32_bits(request->dma_desc));
318 spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
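/*
 * Illustrative call sequence only (not part of the driver): a caller builds
 * a fully dma-mapped descriptor and hands it to talitos_submit() together
 * with a completion callback and a context pointer that is passed back to
 * that callback. Roughly (my_done and cleanup_and_unmap are placeholder
 * names; the real callers in this file are ipsec_esp(), common_nonsnoop()
 * and common_nonsnoop_hash()):
 *
 *	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
 *	ret = talitos_submit(dev, ctx->ch, &edesc->desc, my_done, areq);
 *	if (ret != -EINPROGRESS)
 *		cleanup_and_unmap(dev, edesc, areq);
 */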
323 static __be32 get_request_hdr(struct talitos_request *request, bool is_sec1)
325 struct talitos_edesc *edesc;
328 return request->desc->hdr;
330 if (!request->desc->next_desc)
331 return request->desc->hdr1;
333 edesc = container_of(request->desc, struct talitos_edesc, desc);
335 return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
339 * process what was done; notify callback of error if a descriptor was not done
341 static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
343 struct talitos_private *priv = dev_get_drvdata(dev);
344 struct talitos_request *request, saved_req;
347 bool is_sec1 = has_ftr_sec1(priv);
349 spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
351 tail = priv->chan[ch].tail;
352 while (priv->chan[ch].fifo[tail].desc) {
355 request = &priv->chan[ch].fifo[tail];
357 /* descriptors with their done bits set don't get the error */
359 hdr = get_request_hdr(request, is_sec1);
361 if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
369 dma_unmap_single(dev, request->dma_desc,
373 /* copy entries so we can call callback outside lock */
374 saved_req.desc = request->desc;
375 saved_req.callback = request->callback;
376 saved_req.context = request->context;
378 /* release request entry in fifo */
380 request->desc = NULL;
382 /* increment fifo tail */
383 priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);
385 spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
387 atomic_dec(&priv->chan[ch].submit_count);
389 saved_req.callback(dev, saved_req.desc, saved_req.context,
391 /* channel may resume processing in single desc error case */
392 if (error && !reset_ch && status == error)
394 spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
395 tail = priv->chan[ch].tail;
398 spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
402 * process completed requests for channels that have done status
404 #define DEF_TALITOS1_DONE(name, ch_done_mask) \
405 static void talitos1_done_##name(unsigned long data) \
407 struct device *dev = (struct device *)data; \
408 struct talitos_private *priv = dev_get_drvdata(dev); \
409 unsigned long flags; \
411 if (ch_done_mask & 0x10000000) \
412 flush_channel(dev, 0, 0, 0); \
413 if (ch_done_mask & 0x40000000) \
414 flush_channel(dev, 1, 0, 0); \
415 if (ch_done_mask & 0x00010000) \
416 flush_channel(dev, 2, 0, 0); \
417 if (ch_done_mask & 0x00040000) \
418 flush_channel(dev, 3, 0, 0); \
420 /* At this point, all completed channels have been processed */ \
421 /* Unmask done interrupts for channels completed later on. */ \
422 spin_lock_irqsave(&priv->reg_lock, flags); \
423 clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
424 clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT); \
425 spin_unlock_irqrestore(&priv->reg_lock, flags); \
428 DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
429 DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)
431 #define DEF_TALITOS2_DONE(name, ch_done_mask) \
432 static void talitos2_done_##name(unsigned long data) \
434 struct device *dev = (struct device *)data; \
435 struct talitos_private *priv = dev_get_drvdata(dev); \
436 unsigned long flags; \
438 if (ch_done_mask & 1) \
439 flush_channel(dev, 0, 0, 0); \
440 if (ch_done_mask & (1 << 2)) \
441 flush_channel(dev, 1, 0, 0); \
442 if (ch_done_mask & (1 << 4)) \
443 flush_channel(dev, 2, 0, 0); \
444 if (ch_done_mask & (1 << 6)) \
445 flush_channel(dev, 3, 0, 0); \
447 /* At this point, all completed channels have been processed */ \
448 /* Unmask done interrupts for channels completed later on. */ \
449 spin_lock_irqsave(&priv->reg_lock, flags); \
450 setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
451 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT); \
452 spin_unlock_irqrestore(&priv->reg_lock, flags); \
455 DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
456 DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
457 DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
458 DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
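/*
 * The *_done tasklets above drain the channels whose done bits were set and
 * then re-enable (unmask) the done interrupts that the IRQ handlers masked
 * before scheduling them. Note the inverted IMR polarity between the two
 * generations as used here: SEC1 unmasks with clrbits32() while SEC2+
 * unmasks with setbits32().
 */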
461 * locate current (offending) descriptor
463 static __be32 current_desc_hdr(struct device *dev, int ch)
465 struct talitos_private *priv = dev_get_drvdata(dev);
469 cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
470 cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);
473 dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
477 tail = priv->chan[ch].tail;
480 while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
481 priv->chan[ch].fifo[iter].desc->next_desc != cpu_to_be32(cur_desc)) {
482 iter = (iter + 1) & (priv->fifo_len - 1);
484 dev_err(dev, "couldn't locate current descriptor\n");
489 if (priv->chan[ch].fifo[iter].desc->next_desc == cpu_to_be32(cur_desc)) {
490 struct talitos_edesc *edesc;
492 edesc = container_of(priv->chan[ch].fifo[iter].desc,
493 struct talitos_edesc, desc);
494 return ((struct talitos_desc *)
495 (edesc->buf + edesc->dma_len))->hdr;
498 return priv->chan[ch].fifo[iter].desc->hdr;
502 * user diagnostics; report root cause of error based on execution unit status
504 static void report_eu_error(struct device *dev, int ch, __be32 desc_hdr)
506 struct talitos_private *priv = dev_get_drvdata(dev);
510 desc_hdr = cpu_to_be32(in_be32(priv->chan[ch].reg + TALITOS_DESCBUF));
512 switch (desc_hdr & DESC_HDR_SEL0_MASK) {
513 case DESC_HDR_SEL0_AFEU:
514 dev_err(dev, "AFEUISR 0x%08x_%08x\n",
515 in_be32(priv->reg_afeu + TALITOS_EUISR),
516 in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
518 case DESC_HDR_SEL0_DEU:
519 dev_err(dev, "DEUISR 0x%08x_%08x\n",
520 in_be32(priv->reg_deu + TALITOS_EUISR),
521 in_be32(priv->reg_deu + TALITOS_EUISR_LO));
523 case DESC_HDR_SEL0_MDEUA:
524 case DESC_HDR_SEL0_MDEUB:
525 dev_err(dev, "MDEUISR 0x%08x_%08x\n",
526 in_be32(priv->reg_mdeu + TALITOS_EUISR),
527 in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
529 case DESC_HDR_SEL0_RNG:
530 dev_err(dev, "RNGUISR 0x%08x_%08x\n",
531 in_be32(priv->reg_rngu + TALITOS_ISR),
532 in_be32(priv->reg_rngu + TALITOS_ISR_LO));
534 case DESC_HDR_SEL0_PKEU:
535 dev_err(dev, "PKEUISR 0x%08x_%08x\n",
536 in_be32(priv->reg_pkeu + TALITOS_EUISR),
537 in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
539 case DESC_HDR_SEL0_AESU:
540 dev_err(dev, "AESUISR 0x%08x_%08x\n",
541 in_be32(priv->reg_aesu + TALITOS_EUISR),
542 in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
544 case DESC_HDR_SEL0_CRCU:
545 dev_err(dev, "CRCUISR 0x%08x_%08x\n",
546 in_be32(priv->reg_crcu + TALITOS_EUISR),
547 in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
549 case DESC_HDR_SEL0_KEU:
550 dev_err(dev, "KEUISR 0x%08x_%08x\n",
551 in_be32(priv->reg_pkeu + TALITOS_EUISR),
552 in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
556 switch (desc_hdr & DESC_HDR_SEL1_MASK) {
557 case DESC_HDR_SEL1_MDEUA:
558 case DESC_HDR_SEL1_MDEUB:
559 dev_err(dev, "MDEUISR 0x%08x_%08x\n",
560 in_be32(priv->reg_mdeu + TALITOS_EUISR),
561 in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
563 case DESC_HDR_SEL1_CRCU:
564 dev_err(dev, "CRCUISR 0x%08x_%08x\n",
565 in_be32(priv->reg_crcu + TALITOS_EUISR),
566 in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
570 for (i = 0; i < 8; i++)
571 dev_err(dev, "DESCBUF 0x%08x_%08x\n",
572 in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
573 in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
577 * recover from error interrupts
579 static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
581 struct talitos_private *priv = dev_get_drvdata(dev);
582 unsigned int timeout = TALITOS_TIMEOUT;
583 int ch, error, reset_dev = 0;
585 bool is_sec1 = has_ftr_sec1(priv);
586 int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */
588 for (ch = 0; ch < priv->num_channels; ch++) {
589 /* skip channels without errors */
591 /* bits 29, 31, 17, 19 */
592 if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
595 if (!(isr & (1 << (ch * 2 + 1))))
601 v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);
603 if (v_lo & TALITOS_CCPSR_LO_DOF) {
604 dev_err(dev, "double fetch fifo overflow error\n");
608 if (v_lo & TALITOS_CCPSR_LO_SOF) {
609 /* h/w dropped descriptor */
610 dev_err(dev, "single fetch fifo overflow error\n");
613 if (v_lo & TALITOS_CCPSR_LO_MDTE)
614 dev_err(dev, "master data transfer error\n");
615 if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
616 dev_err(dev, is_sec1 ? "pointer not complete error\n"
617 : "s/g data length zero error\n");
618 if (v_lo & TALITOS_CCPSR_LO_FPZ)
619 dev_err(dev, is_sec1 ? "parity error\n"
620 : "fetch pointer zero error\n");
621 if (v_lo & TALITOS_CCPSR_LO_IDH)
622 dev_err(dev, "illegal descriptor header error\n");
623 if (v_lo & TALITOS_CCPSR_LO_IEU)
624 dev_err(dev, is_sec1 ? "static assignment error\n"
625 : "invalid exec unit error\n");
626 if (v_lo & TALITOS_CCPSR_LO_EU)
627 report_eu_error(dev, ch, current_desc_hdr(dev, ch));
629 if (v_lo & TALITOS_CCPSR_LO_GB)
630 dev_err(dev, "gather boundary error\n");
631 if (v_lo & TALITOS_CCPSR_LO_GRL)
632 dev_err(dev, "gather return/length error\n");
633 if (v_lo & TALITOS_CCPSR_LO_SB)
634 dev_err(dev, "scatter boundary error\n");
635 if (v_lo & TALITOS_CCPSR_LO_SRL)
636 dev_err(dev, "scatter return/length error\n");
639 flush_channel(dev, ch, error, reset_ch);
642 reset_channel(dev, ch);
644 setbits32(priv->chan[ch].reg + TALITOS_CCCR,
646 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
647 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
648 TALITOS2_CCCR_CONT) && --timeout)
651 dev_err(dev, "failed to restart channel %d\n",
657 if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
658 (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
659 if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
660 dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
663 dev_err(dev, "done overflow, internal time out, or "
664 "rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);
666 /* purge request queues */
667 for (ch = 0; ch < priv->num_channels; ch++)
668 flush_channel(dev, ch, -EIO, 1);
670 /* reset and reinitialize the device */
675 #define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
676 static irqreturn_t talitos1_interrupt_##name(int irq, void *data) \
678 struct device *dev = data; \
679 struct talitos_private *priv = dev_get_drvdata(dev); \
681 unsigned long flags; \
683 spin_lock_irqsave(&priv->reg_lock, flags); \
684 isr = in_be32(priv->reg + TALITOS_ISR); \
685 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
686 /* Acknowledge interrupt */ \
687 out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
688 out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
690 if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) { \
691 spin_unlock_irqrestore(&priv->reg_lock, flags); \
692 talitos_error(dev, isr & ch_err_mask, isr_lo); \
695 if (likely(isr & ch_done_mask)) { \
696 /* mask further done interrupts. */ \
697 setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
698 /* done_task will unmask done interrupts at exit */ \
699 tasklet_schedule(&priv->done_task[tlet]); \
701 spin_unlock_irqrestore(&priv->reg_lock, flags); \
704 return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
708 DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)
710 #define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
711 static irqreturn_t talitos2_interrupt_##name(int irq, void *data) \
713 struct device *dev = data; \
714 struct talitos_private *priv = dev_get_drvdata(dev); \
716 unsigned long flags; \
718 spin_lock_irqsave(&priv->reg_lock, flags); \
719 isr = in_be32(priv->reg + TALITOS_ISR); \
720 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
721 /* Acknowledge interrupt */ \
722 out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
723 out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
725 if (unlikely(isr & ch_err_mask || isr_lo)) { \
726 spin_unlock_irqrestore(&priv->reg_lock, flags); \
727 talitos_error(dev, isr & ch_err_mask, isr_lo); \
730 if (likely(isr & ch_done_mask)) { \
731 /* mask further done interrupts. */ \
732 clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
733 /* done_task will unmask done interrupts at exit */ \
734 tasklet_schedule(&priv->done_task[tlet]); \
736 spin_unlock_irqrestore(&priv->reg_lock, flags); \
739 return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
743 DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
744 DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
746 DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
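/*
 * Interrupt flow for the handlers generated above: read and acknowledge
 * ISR/ISR_LO, hand error bits to talitos_error(), and for done bits mask
 * further done interrupts and defer completion processing to the matching
 * done tasklet, which unmasks them again when it finishes.
 */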
752 static int talitos_rng_data_present(struct hwrng *rng, int wait)
754 struct device *dev = (struct device *)rng->priv;
755 struct talitos_private *priv = dev_get_drvdata(dev);
759 for (i = 0; i < 20; i++) {
760 ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
761 TALITOS_RNGUSR_LO_OFL;
770 static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
772 struct device *dev = (struct device *)rng->priv;
773 struct talitos_private *priv = dev_get_drvdata(dev);
775 /* rng fifo requires 64-bit accesses */
776 *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
777 *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);
782 static int talitos_rng_init(struct hwrng *rng)
784 struct device *dev = (struct device *)rng->priv;
785 struct talitos_private *priv = dev_get_drvdata(dev);
786 unsigned int timeout = TALITOS_TIMEOUT;
788 setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
789 while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
790 & TALITOS_RNGUSR_LO_RD)
794 dev_err(dev, "failed to reset rng hw\n");
798 /* start generating */
799 setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);
804 static int talitos_register_rng(struct device *dev)
806 struct talitos_private *priv = dev_get_drvdata(dev);
809 priv->rng.name = dev_driver_string(dev);
810 priv->rng.init = talitos_rng_init;
811 priv->rng.data_present = talitos_rng_data_present;
812 priv->rng.data_read = talitos_rng_data_read;
813 priv->rng.priv = (unsigned long)dev;
815 err = hwrng_register(&priv->rng);
817 priv->rng_registered = true;
822 static void talitos_unregister_rng(struct device *dev)
824 struct talitos_private *priv = dev_get_drvdata(dev);
826 if (!priv->rng_registered)
829 hwrng_unregister(&priv->rng);
830 priv->rng_registered = false;
836 #define TALITOS_CRA_PRIORITY 3000
838 * Defines a priority for doing AEAD with descriptors of type
839 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
841 #define TALITOS_CRA_PRIORITY_AEAD_HSNA (TALITOS_CRA_PRIORITY - 1)
842 #ifdef CONFIG_CRYPTO_DEV_TALITOS2
843 #define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
845 #define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
847 #define TALITOS_MAX_IV_LENGTH 16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
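/*
 * TALITOS_MAX_KEY_SIZE has to hold a concatenated authenc key: an HMAC key
 * of at most one hash block followed by a cipher key of at most an AES key,
 * which is how aead_setkey() lays it out below. When only SEC1 is built
 * (CONFIG_CRYPTO_DEV_TALITOS2 unset) the smaller SHA256 block size is used
 * for that bound.
 */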
852 __be32 desc_hdr_template;
853 u8 key[TALITOS_MAX_KEY_SIZE];
854 u8 iv[TALITOS_MAX_IV_LENGTH];
857 unsigned int enckeylen;
858 unsigned int authkeylen;
861 #define HASH_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE
862 #define TALITOS_MDEU_MAX_CONTEXT_SIZE TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512
864 struct talitos_ahash_req_ctx {
865 u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
866 unsigned int hw_context_size;
867 u8 buf[2][HASH_MAX_BLOCK_SIZE];
872 unsigned int to_hash_later;
874 struct scatterlist bufsl[2];
875 struct scatterlist *psrc;
878 struct talitos_export_state {
879 u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
880 u8 buf[HASH_MAX_BLOCK_SIZE];
884 unsigned int to_hash_later;
888 static int aead_setkey(struct crypto_aead *authenc,
889 const u8 *key, unsigned int keylen)
891 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
892 struct device *dev = ctx->dev;
893 struct crypto_authenc_keys keys;
895 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
898 if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
902 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
904 memcpy(ctx->key, keys.authkey, keys.authkeylen);
905 memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
907 ctx->keylen = keys.authkeylen + keys.enckeylen;
908 ctx->enckeylen = keys.enckeylen;
909 ctx->authkeylen = keys.authkeylen;
910 ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
913 memzero_explicit(&keys, sizeof(keys));
917 memzero_explicit(&keys, sizeof(keys));
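/*
 * After a successful aead_setkey() the context key area looks like this
 * (illustrative layout; offsets depend on the extracted key lengths):
 *
 *	ctx->key:  [ authkey (authkeylen bytes) | enckey (enckeylen bytes) ]
 *
 * ctx->dma_key maps that combined blob once (DMA_TO_DEVICE), so descriptor
 * key pointers can reference either half directly via an offset.
 */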
921 static int aead_des3_setkey(struct crypto_aead *authenc,
922 const u8 *key, unsigned int keylen)
924 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
925 struct device *dev = ctx->dev;
926 struct crypto_authenc_keys keys;
929 err = crypto_authenc_extractkeys(&keys, key, keylen);
934 if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
937 err = verify_aead_des3_key(authenc, keys.enckey, keys.enckeylen);
942 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
944 memcpy(ctx->key, keys.authkey, keys.authkeylen);
945 memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
947 ctx->keylen = keys.authkeylen + keys.enckeylen;
948 ctx->enckeylen = keys.enckeylen;
949 ctx->authkeylen = keys.authkeylen;
950 ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
954 memzero_explicit(&keys, sizeof(keys));
958 static void talitos_sg_unmap(struct device *dev,
959 struct talitos_edesc *edesc,
960 struct scatterlist *src,
961 struct scatterlist *dst,
962 unsigned int len, unsigned int offset)
964 struct talitos_private *priv = dev_get_drvdata(dev);
965 bool is_sec1 = has_ftr_sec1(priv);
966 unsigned int src_nents = edesc->src_nents ? : 1;
967 unsigned int dst_nents = edesc->dst_nents ? : 1;
969 if (is_sec1 && dst && dst_nents > 1) {
970 dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
971 len, DMA_FROM_DEVICE);
972 sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
976 if (src_nents == 1 || !is_sec1)
977 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
979 if (dst && (dst_nents == 1 || !is_sec1))
980 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
981 } else if (src_nents == 1 || !is_sec1) {
982 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
986 static void ipsec_esp_unmap(struct device *dev,
987 struct talitos_edesc *edesc,
988 struct aead_request *areq, bool encrypt)
990 struct crypto_aead *aead = crypto_aead_reqtfm(areq);
991 struct talitos_ctx *ctx = crypto_aead_ctx(aead);
992 unsigned int ivsize = crypto_aead_ivsize(aead);
993 unsigned int authsize = crypto_aead_authsize(aead);
994 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
995 bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
996 struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];
999 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
1001 unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);
1003 talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
1004 cryptlen + authsize, areq->assoclen);
1007 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1010 if (!is_ipsec_esp) {
1011 unsigned int dst_nents = edesc->dst_nents ? : 1;
1013 sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
1014 areq->assoclen + cryptlen - ivsize);
1019 * ipsec_esp descriptor callbacks
1021 static void ipsec_esp_encrypt_done(struct device *dev,
1022 struct talitos_desc *desc, void *context,
1025 struct aead_request *areq = context;
1026 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
1027 unsigned int ivsize = crypto_aead_ivsize(authenc);
1028 struct talitos_edesc *edesc;
1030 edesc = container_of(desc, struct talitos_edesc, desc);
1032 ipsec_esp_unmap(dev, edesc, areq, true);
1034 dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);
1038 aead_request_complete(areq, err);
1041 static void ipsec_esp_decrypt_swauth_done(struct device *dev,
1042 struct talitos_desc *desc,
1043 void *context, int err)
1045 struct aead_request *req = context;
1046 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1047 unsigned int authsize = crypto_aead_authsize(authenc);
1048 struct talitos_edesc *edesc;
1051 edesc = container_of(desc, struct talitos_edesc, desc);
1053 ipsec_esp_unmap(dev, edesc, req, false);
1057 oicv = edesc->buf + edesc->dma_len;
1058 icv = oicv - authsize;
1060 err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
1065 aead_request_complete(req, err);
1068 static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
1069 struct talitos_desc *desc,
1070 void *context, int err)
1072 struct aead_request *req = context;
1073 struct talitos_edesc *edesc;
1075 edesc = container_of(desc, struct talitos_edesc, desc);
1077 ipsec_esp_unmap(dev, edesc, req, false);
1079 /* check ICV auth status */
1080 if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
1081 DESC_HDR_LO_ICCR1_PASS))
1086 aead_request_complete(req, err);
1090 * convert scatterlist to SEC h/w link table format
1091 * stop at cryptlen bytes
1093 static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
1094 unsigned int offset, int datalen, int elen,
1095 struct talitos_ptr *link_tbl_ptr, int align)
1097 int n_sg = elen ? sg_count + 1 : sg_count;
1099 int cryptlen = datalen + elen;
1100 int padding = ALIGN(cryptlen, align) - cryptlen;
1102 while (cryptlen && sg && n_sg--) {
1103 unsigned int len = sg_dma_len(sg);
1105 if (offset >= len) {
1115 if (datalen > 0 && len > datalen) {
1116 to_talitos_ptr(link_tbl_ptr + count,
1117 sg_dma_address(sg) + offset, datalen, 0);
1118 to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
1123 to_talitos_ptr(link_tbl_ptr + count,
1124 sg_dma_address(sg) + offset, sg_next(sg) ? len : len + padding, 0);
1125 to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
1135 /* tag end of link table */
1137 to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
1138 DESC_PTR_LNKTBL_RET, 0);
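/*
 * Each link table entry produced above is itself a talitos_ptr (address,
 * length, extent byte). The walker splits the scatterlist at the datalen
 * boundary, pads the final data segment up to "align" when needed, and tags
 * the last entry with DESC_PTR_LNKTBL_RET so the engine knows where the
 * table ends; the caller marks the descriptor pointer itself with
 * DESC_PTR_LNKTBL_JUMP when it points at such a table.
 */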
1143 static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
1144 unsigned int len, struct talitos_edesc *edesc,
1145 struct talitos_ptr *ptr, int sg_count,
1146 unsigned int offset, int tbl_off, int elen,
1147 bool force, int align)
1149 struct talitos_private *priv = dev_get_drvdata(dev);
1150 bool is_sec1 = has_ftr_sec1(priv);
1151 int aligned_len = ALIGN(len, align);
1154 to_talitos_ptr(ptr, 0, 0, is_sec1);
1157 to_talitos_ptr_ext_set(ptr, elen, is_sec1);
1158 if (sg_count == 1 && !force) {
1159 to_talitos_ptr(ptr, sg_dma_address(src) + offset, aligned_len, is_sec1);
1163 to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, aligned_len, is_sec1);
1166 sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len, elen,
1167 &edesc->link_tbl[tbl_off], align);
1168 if (sg_count == 1 && !force) {
1169 /* Only one segment now, so no link tbl needed */
1170 copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
1173 to_talitos_ptr(ptr, edesc->dma_link_tbl +
1174 tbl_off * sizeof(struct talitos_ptr), aligned_len, is_sec1);
1175 to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);
1180 static int talitos_sg_map(struct device *dev, struct scatterlist *src,
1181 unsigned int len, struct talitos_edesc *edesc,
1182 struct talitos_ptr *ptr, int sg_count,
1183 unsigned int offset, int tbl_off)
1185 return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
1186 tbl_off, 0, false, 1);
1190 * fill in and submit ipsec_esp descriptor
1192 static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
1194 void (*callback)(struct device *dev,
1195 struct talitos_desc *desc,
1196 void *context, int error))
1198 struct crypto_aead *aead = crypto_aead_reqtfm(areq);
1199 unsigned int authsize = crypto_aead_authsize(aead);
1200 struct talitos_ctx *ctx = crypto_aead_ctx(aead);
1201 struct device *dev = ctx->dev;
1202 struct talitos_desc *desc = &edesc->desc;
1203 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
1204 unsigned int ivsize = crypto_aead_ivsize(aead);
1208 bool sync_needed = false;
1209 struct talitos_private *priv = dev_get_drvdata(dev);
1210 bool is_sec1 = has_ftr_sec1(priv);
1211 bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
1212 struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
1213 struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];
1214 dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;
1217 to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);
1219 sg_count = edesc->src_nents ?: 1;
1220 if (is_sec1 && sg_count > 1)
1221 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1222 areq->assoclen + cryptlen);
1224 sg_count = dma_map_sg(dev, areq->src, sg_count,
1225 (areq->src == areq->dst) ?
1226 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
1229 ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
1230 &desc->ptr[1], sg_count, 0, tbl_off);
1238 to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);
1241 to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
1242 ctx->enckeylen, is_sec1);
1246 * map and adjust cipher len to aead request cryptlen.
1247 * extent is the number of HMAC bytes appended after the ciphertext,
1248 * typically 12 for ipsec
1250 if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
1253 ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
1254 sg_count, areq->assoclen, tbl_off, elen,
1263 if (areq->src != areq->dst) {
1264 sg_count = edesc->dst_nents ? : 1;
1265 if (!is_sec1 || sg_count == 1)
1266 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1269 if (is_ipsec_esp && encrypt)
1273 ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
1274 sg_count, areq->assoclen, tbl_off, elen,
1275 is_ipsec_esp && !encrypt, 1);
1278 if (!encrypt && is_ipsec_esp) {
1279 struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
1281 /* Add an entry to the link table for ICV data */
1282 to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
1283 to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RET, is_sec1);
1285 /* icv data follows link tables */
1286 to_talitos_ptr(tbl_ptr, dma_icv, authsize, is_sec1);
1287 to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
1289 } else if (!encrypt) {
1290 to_talitos_ptr(&desc->ptr[6], dma_icv, authsize, is_sec1);
1292 } else if (!is_ipsec_esp) {
1293 talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
1294 sg_count, areq->assoclen + cryptlen, tbl_off);
1299 map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
1303 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1307 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
1308 if (ret != -EINPROGRESS) {
1309 ipsec_esp_unmap(dev, edesc, areq, encrypt);
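/*
 * Pointer map used by ipsec_esp() above, as programmed in this function:
 * ptr[0] HMAC key, ptr[1] associated data, ptr[2]/ptr[3] cipher IV and
 * cipher key (swapped between IPSEC_ESP and HSNA descriptor types),
 * ptr[4] cipher input, ptr[5] cipher output, ptr[6] generated ICV, stashed
 * ICV or IV out depending on direction and descriptor type.
 */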
1316 * allocate and map the extended descriptor
1318 static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
1319 struct scatterlist *src,
1320 struct scatterlist *dst,
1322 unsigned int assoclen,
1323 unsigned int cryptlen,
1324 unsigned int authsize,
1325 unsigned int ivsize,
1330 struct talitos_edesc *edesc;
1331 int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
1332 dma_addr_t iv_dma = 0;
1333 gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
1335 struct talitos_private *priv = dev_get_drvdata(dev);
1336 bool is_sec1 = has_ftr_sec1(priv);
1337 int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;
1339 if (cryptlen + authsize > max_len) {
1340 dev_err(dev, "length exceeds h/w max limit\n");
1341 return ERR_PTR(-EINVAL);
1344 if (!dst || dst == src) {
1345 src_len = assoclen + cryptlen + authsize;
1346 src_nents = sg_nents_for_len(src, src_len);
1347 if (src_nents < 0) {
1348 dev_err(dev, "Invalid number of src SG.\n");
1349 return ERR_PTR(-EINVAL);
1351 src_nents = (src_nents == 1) ? 0 : src_nents;
1352 dst_nents = dst ? src_nents : 0;
1354 } else { /* dst && dst != src*/
1355 src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
1356 src_nents = sg_nents_for_len(src, src_len);
1357 if (src_nents < 0) {
1358 dev_err(dev, "Invalid number of src SG.\n");
1359 return ERR_PTR(-EINVAL);
1361 src_nents = (src_nents == 1) ? 0 : src_nents;
1362 dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
1363 dst_nents = sg_nents_for_len(dst, dst_len);
1364 if (dst_nents < 0) {
1365 dev_err(dev, "Invalid number of dst SG.\n");
1366 return ERR_PTR(-EINVAL);
1368 dst_nents = (dst_nents == 1) ? 0 : dst_nents;
1372 * allocate space for base edesc plus the link tables,
1373 * allowing for two separate entries for AD and generated ICV (+ 2),
1374 * and space for two sets of ICVs (stashed and generated)
1376 alloc_len = sizeof(struct talitos_edesc);
1377 if (src_nents || dst_nents || !encrypt) {
1379 dma_len = (src_nents ? src_len : 0) +
1380 (dst_nents ? dst_len : 0) + authsize;
1382 dma_len = (src_nents + dst_nents + 2) *
1383 sizeof(struct talitos_ptr) + authsize;
1384 alloc_len += dma_len;
1388 alloc_len += icv_stashing ? authsize : 0;
1390 /* if it's an ahash, add space for a second desc next to the first one */
1391 if (is_sec1 && !dst)
1392 alloc_len += sizeof(struct talitos_desc);
1393 alloc_len += ivsize;
1395 edesc = kmalloc(alloc_len, GFP_DMA | flags);
1397 return ERR_PTR(-ENOMEM);
1399 iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
1400 iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
1402 memset(&edesc->desc, 0, sizeof(edesc->desc));
1404 edesc->src_nents = src_nents;
1405 edesc->dst_nents = dst_nents;
1406 edesc->iv_dma = iv_dma;
1407 edesc->dma_len = dma_len;
1409 edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
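/*
 * Resulting talitos_edesc memory layout (allocated just above): the base
 * struct, then dma_len bytes that are either a raw bounce buffer (SEC1) or
 * an array of link-table entries (SEC2+), optionally a stashed ICV,
 * optionally a second descriptor for the SEC1 ahash case, and finally a
 * copy of the IV, which is what the iv_dma single mapping covers.
 */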
1416 static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
1417 int icv_stashing, bool encrypt)
1419 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
1420 unsigned int authsize = crypto_aead_authsize(authenc);
1421 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1422 unsigned int ivsize = crypto_aead_ivsize(authenc);
1423 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
1425 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
1426 iv, areq->assoclen, cryptlen,
1427 authsize, ivsize, icv_stashing,
1428 areq->base.flags, encrypt);
1431 static int aead_encrypt(struct aead_request *req)
1433 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1434 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1435 struct talitos_edesc *edesc;
1437 /* allocate extended descriptor */
1438 edesc = aead_edesc_alloc(req, req->iv, 0, true);
1440 return PTR_ERR(edesc);
1443 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1445 return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
1448 static int aead_decrypt(struct aead_request *req)
1450 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1451 unsigned int authsize = crypto_aead_authsize(authenc);
1452 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1453 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1454 struct talitos_edesc *edesc;
1457 /* allocate extended descriptor */
1458 edesc = aead_edesc_alloc(req, req->iv, 1, false);
1460 return PTR_ERR(edesc);
1462 if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
1463 (priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
1464 ((!edesc->src_nents && !edesc->dst_nents) ||
1465 priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {
1467 /* decrypt and check the ICV */
1468 edesc->desc.hdr = ctx->desc_hdr_template |
1469 DESC_HDR_DIR_INBOUND |
1470 DESC_HDR_MODE1_MDEU_CICV;
1472 /* reset integrity check result bits */
1474 return ipsec_esp(edesc, req, false,
1475 ipsec_esp_decrypt_hwauth_done);
1478 /* Have to check the ICV with software */
1479 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1481 /* stash incoming ICV for later comparison with the ICV generated by the h/w */
1482 icvdata = edesc->buf + edesc->dma_len;
1484 sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
1485 req->assoclen + req->cryptlen - authsize);
1487 return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
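/*
 * Two decrypt flavours above: when the hardware can check the ICV itself
 * (IPSEC_ESP descriptor, HW_AUTH_CHECK feature, and a source layout the
 * engine accepts) the MDEU_CICV mode is used and the result is read back in
 * ipsec_esp_decrypt_hwauth_done(); otherwise the incoming ICV is stashed
 * past the link tables and compared in software by
 * ipsec_esp_decrypt_swauth_done() via crypto_memneq().
 */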
1490 static int skcipher_setkey(struct crypto_skcipher *cipher,
1491 const u8 *key, unsigned int keylen)
1493 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1494 struct device *dev = ctx->dev;
1497 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
1499 memcpy(&ctx->key, key, keylen);
1500 ctx->keylen = keylen;
1502 ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);
1507 static int skcipher_des_setkey(struct crypto_skcipher *cipher,
1508 const u8 *key, unsigned int keylen)
1510 return verify_skcipher_des_key(cipher, key) ?:
1511 skcipher_setkey(cipher, key, keylen);
1514 static int skcipher_des3_setkey(struct crypto_skcipher *cipher,
1515 const u8 *key, unsigned int keylen)
1517 return verify_skcipher_des3_key(cipher, key) ?:
1518 skcipher_setkey(cipher, key, keylen);
1521 static int skcipher_aes_setkey(struct crypto_skcipher *cipher,
1522 const u8 *key, unsigned int keylen)
1524 if (keylen == AES_KEYSIZE_128 || keylen == AES_KEYSIZE_192 ||
1525 keylen == AES_KEYSIZE_256)
1526 return skcipher_setkey(cipher, key, keylen);
1531 static void common_nonsnoop_unmap(struct device *dev,
1532 struct talitos_edesc *edesc,
1533 struct skcipher_request *areq)
1535 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1537 talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen, 0);
1538 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);
1541 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1545 static void skcipher_done(struct device *dev,
1546 struct talitos_desc *desc, void *context,
1549 struct skcipher_request *areq = context;
1550 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1551 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1552 unsigned int ivsize = crypto_skcipher_ivsize(cipher);
1553 struct talitos_edesc *edesc;
1555 edesc = container_of(desc, struct talitos_edesc, desc);
1557 common_nonsnoop_unmap(dev, edesc, areq);
1558 memcpy(areq->iv, ctx->iv, ivsize);
1562 areq->base.complete(&areq->base, err);
1565 static int common_nonsnoop(struct talitos_edesc *edesc,
1566 struct skcipher_request *areq,
1567 void (*callback) (struct device *dev,
1568 struct talitos_desc *desc,
1569 void *context, int error))
1571 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1572 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1573 struct device *dev = ctx->dev;
1574 struct talitos_desc *desc = &edesc->desc;
1575 unsigned int cryptlen = areq->cryptlen;
1576 unsigned int ivsize = crypto_skcipher_ivsize(cipher);
1578 bool sync_needed = false;
1579 struct talitos_private *priv = dev_get_drvdata(dev);
1580 bool is_sec1 = has_ftr_sec1(priv);
1581 bool is_ctr = (desc->hdr & DESC_HDR_SEL0_MASK) == DESC_HDR_SEL0_AESU &&
1582 (desc->hdr & DESC_HDR_MODE0_AESU_MASK) == DESC_HDR_MODE0_AESU_CTR;
1584 /* first DWORD empty */
1587 to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);
1590 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);
1592 sg_count = edesc->src_nents ?: 1;
1593 if (is_sec1 && sg_count > 1)
1594 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1597 sg_count = dma_map_sg(dev, areq->src, sg_count,
1598 (areq->src == areq->dst) ?
1599 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
1603 sg_count = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[3],
1604 sg_count, 0, 0, 0, false, is_ctr ? 16 : 1);
1609 if (areq->src != areq->dst) {
1610 sg_count = edesc->dst_nents ? : 1;
1611 if (!is_sec1 || sg_count == 1)
1612 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1615 ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
1616 sg_count, 0, (edesc->src_nents + 1));
1621 map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
1624 /* last DWORD empty */
1627 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1628 edesc->dma_len, DMA_BIDIRECTIONAL);
1630 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
1631 if (ret != -EINPROGRESS) {
1632 common_nonsnoop_unmap(dev, edesc, areq);
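/*
 * Pointer map for the skcipher descriptor built above: ptr[0] and ptr[6]
 * unused, ptr[1] IV in, ptr[2] cipher key, ptr[3] data in, ptr[4] data out,
 * ptr[5] IV out (copied back to areq->iv in skcipher_done()).
 */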
1638 static struct talitos_edesc *skcipher_edesc_alloc(struct skcipher_request *
1641 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1642 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1643 unsigned int ivsize = crypto_skcipher_ivsize(cipher);
1645 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
1646 areq->iv, 0, areq->cryptlen, 0, ivsize, 0,
1647 areq->base.flags, encrypt);
1650 static int skcipher_encrypt(struct skcipher_request *areq)
1652 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1653 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1654 struct talitos_edesc *edesc;
1655 unsigned int blocksize =
1656 crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));
1658 if (!areq->cryptlen)
1661 if (areq->cryptlen % blocksize)
1664 /* allocate extended descriptor */
1665 edesc = skcipher_edesc_alloc(areq, true);
1667 return PTR_ERR(edesc);
1670 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1672 return common_nonsnoop(edesc, areq, skcipher_done);
1675 static int skcipher_decrypt(struct skcipher_request *areq)
1677 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1678 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1679 struct talitos_edesc *edesc;
1680 unsigned int blocksize =
1681 crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));
1683 if (!areq->cryptlen)
1686 if (areq->cryptlen % blocksize)
1689 /* allocate extended descriptor */
1690 edesc = skcipher_edesc_alloc(areq, false);
1692 return PTR_ERR(edesc);
1694 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1696 return common_nonsnoop(edesc, areq, skcipher_done);
1699 static void common_nonsnoop_hash_unmap(struct device *dev,
1700 struct talitos_edesc *edesc,
1701 struct ahash_request *areq)
1703 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1704 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1705 struct talitos_private *priv = dev_get_drvdata(dev);
1706 bool is_sec1 = has_ftr_sec1(priv);
1707 struct talitos_desc *desc = &edesc->desc;
1708 struct talitos_desc *desc2 = (struct talitos_desc *)
1709 (edesc->buf + edesc->dma_len);
1711 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1712 if (desc->next_desc &&
1713 desc->ptr[5].ptr != desc2->ptr[5].ptr)
1714 unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
1716 memcpy(areq->result, req_ctx->hw_context,
1717 crypto_ahash_digestsize(tfm));
1720 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
1722 /* When using hashctx-in, must unmap it. */
1723 if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
1724 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
1726 else if (desc->next_desc)
1727 unmap_single_talitos_ptr(dev, &desc2->ptr[1],
1730 if (is_sec1 && req_ctx->nbuf)
1731 unmap_single_talitos_ptr(dev, &desc->ptr[3],
1735 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1738 if (edesc->desc.next_desc)
1739 dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
1740 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
1743 static void ahash_done(struct device *dev,
1744 struct talitos_desc *desc, void *context,
1747 struct ahash_request *areq = context;
1748 struct talitos_edesc *edesc =
1749 container_of(desc, struct talitos_edesc, desc);
1750 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1752 if (!req_ctx->last && req_ctx->to_hash_later) {
1753 /* Position any partial block for next update/final/finup */
1754 req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
1755 req_ctx->nbuf = req_ctx->to_hash_later;
1757 common_nonsnoop_hash_unmap(dev, edesc, areq);
1761 areq->base.complete(&areq->base, err);
1765 * SEC1 doesn't like hashing a 0-sized message, so we do the padding
1766 * ourselves and submit a padded block
1768 static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
1769 struct talitos_edesc *edesc,
1770 struct talitos_ptr *ptr)
1772 static u8 padded_hash[64] = {
1773 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1774 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1775 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1776 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1779 pr_err_once("Bug in SEC1, padding ourself\n");
1780 edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1781 map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1782 (char *)padded_hash, DMA_TO_DEVICE);
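/*
 * The 64-byte padded_hash block above is simply an MD5/SHA-style padded
 * empty message: a 0x80 terminator followed by zeros, including a zero
 * length field. Feeding it with MDEU padding disabled makes SEC1 produce
 * the digest of the empty message, which it cannot compute directly.
 */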
1785 static int common_nonsnoop_hash(struct talitos_edesc *edesc,
1786 struct ahash_request *areq, unsigned int length,
1787 void (*callback) (struct device *dev,
1788 struct talitos_desc *desc,
1789 void *context, int error))
1791 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1792 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1793 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1794 struct device *dev = ctx->dev;
1795 struct talitos_desc *desc = &edesc->desc;
1797 bool sync_needed = false;
1798 struct talitos_private *priv = dev_get_drvdata(dev);
1799 bool is_sec1 = has_ftr_sec1(priv);
1802 /* first DWORD empty */
1804 /* hash context in */
1805 if (!req_ctx->first || req_ctx->swinit) {
1806 map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
1807 req_ctx->hw_context_size,
1808 req_ctx->hw_context,
1810 req_ctx->swinit = 0;
1812 /* Indicate next op is not the first. */
1817 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
1820 if (is_sec1 && req_ctx->nbuf)
1821 length -= req_ctx->nbuf;
1823 sg_count = edesc->src_nents ?: 1;
1824 if (is_sec1 && sg_count > 1)
1825 sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
1827 sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
1832 if (is_sec1 && req_ctx->nbuf) {
1833 map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
1834 req_ctx->buf[req_ctx->buf_idx],
1837 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1838 &desc->ptr[3], sg_count, 0, 0);
1843 /* fifth DWORD empty */
1845 /* hash/HMAC out -or- hash context out */
1847 map_single_talitos_ptr(dev, &desc->ptr[5],
1848 crypto_ahash_digestsize(tfm),
1849 req_ctx->hw_context, DMA_FROM_DEVICE);
1851 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1852 req_ctx->hw_context_size,
1853 req_ctx->hw_context,
1856 /* last DWORD empty */
1858 if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
1859 talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
1861 if (is_sec1 && req_ctx->nbuf && length) {
1862 struct talitos_desc *desc2 = (struct talitos_desc *)
1863 (edesc->buf + edesc->dma_len);
1864 dma_addr_t next_desc;
1866 memset(desc2, 0, sizeof(*desc2));
1867 desc2->hdr = desc->hdr;
1868 desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
1869 desc2->hdr1 = desc2->hdr;
1870 desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1871 desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
1872 desc->hdr &= ~DESC_HDR_DONE_NOTIFY;
1874 if (desc->ptr[1].ptr)
1875 copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
1878 map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
1879 req_ctx->hw_context_size,
1880 req_ctx->hw_context,
1882 copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
1883 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1884 &desc2->ptr[3], sg_count, 0, 0);
1887 copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
1889 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1890 req_ctx->hw_context_size,
1891 req_ctx->hw_context,
1894 next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
1896 desc->next_desc = cpu_to_be32(next_desc);
1900 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1901 edesc->dma_len, DMA_BIDIRECTIONAL);
1903 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
1904 if (ret != -EINPROGRESS) {
1905 common_nonsnoop_hash_unmap(dev, edesc, areq);
1911 static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1912 unsigned int nbytes)
1914 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1915 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1916 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1917 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1918 bool is_sec1 = has_ftr_sec1(priv);
1921 nbytes -= req_ctx->nbuf;
1923 return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
1924 nbytes, 0, 0, 0, areq->base.flags, false);
1927 static int ahash_init(struct ahash_request *areq)
1929 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1930 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1931 struct device *dev = ctx->dev;
1932 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1936 /* Initialize the context */
1937 req_ctx->buf_idx = 0;
1939 req_ctx->first = 1; /* first indicates h/w must init its context */
1940 req_ctx->swinit = 0; /* assume h/w init of context */
1941 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
1942 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
1943 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
1944 req_ctx->hw_context_size = size;
1946 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
1948 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
1954 * on h/w without explicit sha224 support, we initialize h/w context
1955 * manually with sha224 constants, and tell it to run sha256.
1957 static int ahash_init_sha224_swinit(struct ahash_request *areq)
1959 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1961 req_ctx->hw_context[0] = SHA224_H0;
1962 req_ctx->hw_context[1] = SHA224_H1;
1963 req_ctx->hw_context[2] = SHA224_H2;
1964 req_ctx->hw_context[3] = SHA224_H3;
1965 req_ctx->hw_context[4] = SHA224_H4;
1966 req_ctx->hw_context[5] = SHA224_H5;
1967 req_ctx->hw_context[6] = SHA224_H6;
1968 req_ctx->hw_context[7] = SHA224_H7;
1970 /* init 64-bit count */
1971 req_ctx->hw_context[8] = 0;
1972 req_ctx->hw_context[9] = 0;
1975 req_ctx->swinit = 1; /* prevent h/w from initializing context with sha256 values */
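/*
 * This works because SHA-224 is SHA-256 with different initial hash values
 * and a truncated output: loading SHA224_H0..H7 here and letting the MDEU
 * run its sha256 operation (swinit suppresses the hardware INIT) yields
 * SHA-224, with the truncation handled by the requested digest size.
 */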
1980 static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
1982 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1983 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1984 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1985 struct talitos_edesc *edesc;
1986 unsigned int blocksize =
1987 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1988 unsigned int nbytes_to_hash;
1989 unsigned int to_hash_later;
1992 struct device *dev = ctx->dev;
1993 struct talitos_private *priv = dev_get_drvdata(dev);
1994 bool is_sec1 = has_ftr_sec1(priv);
1995 u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];
1997 if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
1998 /* Buffer up to one whole block */
1999 nents = sg_nents_for_len(areq->src, nbytes);
2001 dev_err(ctx->dev, "Invalid number of src SG.\n");
2004 sg_copy_to_buffer(areq->src, nents,
2005 ctx_buf + req_ctx->nbuf, nbytes);
2006 req_ctx->nbuf += nbytes;
2010 /* At least (blocksize + 1) bytes are available to hash */
2011 nbytes_to_hash = nbytes + req_ctx->nbuf;
2012 to_hash_later = nbytes_to_hash & (blocksize - 1);
2016 else if (to_hash_later)
2017 /* There is a partial block. Hash the full block(s) now */
2018 nbytes_to_hash -= to_hash_later;
2020 /* Keep one block buffered */
2021 nbytes_to_hash -= blocksize;
2022 to_hash_later = blocksize;
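/*
 * Note: when the total is an exact multiple of the block size and this is
 * not the final request, one full block is deliberately held back so that a
 * later final/finup always has data left to pad and terminate the hash.
 */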
2025 /* Chain in any previously buffered data */
2026 if (!is_sec1 && req_ctx->nbuf) {
2027 nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
2028 sg_init_table(req_ctx->bufsl, nsg);
2029 sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
2031 sg_chain(req_ctx->bufsl, 2, areq->src);
2032 req_ctx->psrc = req_ctx->bufsl;
2033 } else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
2036 if (nbytes_to_hash > blocksize)
2037 offset = blocksize - req_ctx->nbuf;
2039 offset = nbytes_to_hash - req_ctx->nbuf;
2040 nents = sg_nents_for_len(areq->src, offset);
2042 dev_err(ctx->dev, "Invalid number of src SG.\n");
2045 sg_copy_to_buffer(areq->src, nents,
2046 ctx_buf + req_ctx->nbuf, offset);
2047 req_ctx->nbuf += offset;
2048 req_ctx->psrc = scatterwalk_ffwd(req_ctx->bufsl, areq->src,
2051 req_ctx->psrc = areq->src;
2053 if (to_hash_later) {
2054 nents = sg_nents_for_len(areq->src, nbytes);
2056 dev_err(ctx->dev, "Invalid number of src SG.\n");
2059 sg_pcopy_to_buffer(areq->src, nents,
2060 req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
2062 nbytes - to_hash_later);
2064 req_ctx->to_hash_later = to_hash_later;
2066 /* Allocate extended descriptor */
2067 edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
2069 return PTR_ERR(edesc);
2071 edesc->desc.hdr = ctx->desc_hdr_template;
2073 /* On last one, request SEC to pad; otherwise continue */
2075 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
2077 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
2079 /* request SEC to INIT hash. */
2080 if (req_ctx->first && !req_ctx->swinit)
2081 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
2083 /* When the tfm context has a keylen, it's an HMAC.
2084 * A first or last (i.e. not middle) descriptor must request HMAC.
2086 if (ctx->keylen && (req_ctx->first || req_ctx->last))
2087 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
2089 return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
2092 static int ahash_update(struct ahash_request *areq)
2094 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2098 return ahash_process_req(areq, areq->nbytes);
2101 static int ahash_final(struct ahash_request *areq)
2103 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2107 return ahash_process_req(areq, 0);
2110 static int ahash_finup(struct ahash_request *areq)
2112 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2116 return ahash_process_req(areq, areq->nbytes);
2119 static int ahash_digest(struct ahash_request *areq)
2121 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2122 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
2127 return ahash_process_req(areq, areq->nbytes);
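/*
 * export/import snapshot the in-progress hash state: the MDEU hardware
 * context and any partially filled block buffer are copied to/from a
 * struct talitos_export_state so a request can be suspended and resumed.
 */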
2130 static int ahash_export(struct ahash_request *areq, void *out)
2132 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2133 struct talitos_export_state *export = out;
2134 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2135 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2136 struct device *dev = ctx->dev;
2139 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2141 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);
2143 memcpy(export->hw_context, req_ctx->hw_context,
2144 req_ctx->hw_context_size);
2145 memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
2146 export->swinit = req_ctx->swinit;
2147 export->first = req_ctx->first;
2148 export->last = req_ctx->last;
2149 export->to_hash_later = req_ctx->to_hash_later;
2150 export->nbuf = req_ctx->nbuf;
2155 static int ahash_import(struct ahash_request *areq, const void *in)
2157 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2158 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2159 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2160 struct device *dev = ctx->dev;
2161 const struct talitos_export_state *export = in;
2165 memset(req_ctx, 0, sizeof(*req_ctx));
2166 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
2167 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2168 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
2169 req_ctx->hw_context_size = size;
2170 memcpy(req_ctx->hw_context, export->hw_context, size);
2171 memcpy(req_ctx->buf[0], export->buf, export->nbuf);
2172 req_ctx->swinit = export->swinit;
2173 req_ctx->first = export->first;
2174 req_ctx->last = export->last;
2175 req_ctx->to_hash_later = export->to_hash_later;
2176 req_ctx->nbuf = export->nbuf;
2178 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2180 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
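/*
 * keyhash() digests an over-long HMAC key down to digestsize bytes using
 * this same tfm, waiting synchronously for completion via crypto_wait_req().
 */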
2185 static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2188 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2190 struct scatterlist sg[1];
2191 struct ahash_request *req;
2192 struct crypto_wait wait;
2195 crypto_init_wait(&wait);
2197 req = ahash_request_alloc(tfm, GFP_KERNEL);
2201 /* Keep tfm keylen == 0 during hash of the long key */
2203 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2204 crypto_req_done, &wait);
2206 sg_init_one(&sg[0], key, keylen);
2208 ahash_request_set_crypt(req, sg, hash, keylen);
2209 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2211 ahash_request_free(req);
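/*
 * ahash_setkey(): keys up to one block long are used as-is; longer keys
 * are first hashed by keyhash() above, as HMAC requires.  The resulting
 * key material is DMA-mapped once here and reused by later requests.
 */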
2216 static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2217 unsigned int keylen)
2219 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2220 struct device *dev = ctx->dev;
2221 unsigned int blocksize =
2222 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2223 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2224 unsigned int keysize = keylen;
2225 u8 hash[SHA512_DIGEST_SIZE];
2228 if (keylen <= blocksize)
2229 memcpy(ctx->key, key, keysize);
2231 /* Must get the hash of the long key */
2232 ret = keyhash(tfm, key, keylen, hash);
2237 keysize = digestsize;
2238 memcpy(ctx->key, hash, digestsize);
2242 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2244 ctx->keylen = keysize;
2245 ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
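/*
 * Algorithm template table.  Each entry pairs a crypto API algorithm
 * definition (aead/skcipher/ahash) with the SEC descriptor header template
 * that drives it; at probe time hw_supports() filters this table against
 * the execution units and descriptor types advertised in the device tree.
 */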
2251 struct talitos_alg_template {
2255 struct skcipher_alg skcipher;
2256 struct ahash_alg hash;
2257 struct aead_alg aead;
2259 __be32 desc_hdr_template;
2262 static struct talitos_alg_template driver_algs[] = {
2263 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
2264 { .type = CRYPTO_ALG_TYPE_AEAD,
2267 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2268 .cra_driver_name = "authenc-hmac-sha1-"
2270 .cra_blocksize = AES_BLOCK_SIZE,
2271 .cra_flags = CRYPTO_ALG_ASYNC |
2272 CRYPTO_ALG_ALLOCATES_MEMORY,
2274 .ivsize = AES_BLOCK_SIZE,
2275 .maxauthsize = SHA1_DIGEST_SIZE,
2277 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2278 DESC_HDR_SEL0_AESU |
2279 DESC_HDR_MODE0_AESU_CBC |
2280 DESC_HDR_SEL1_MDEUA |
2281 DESC_HDR_MODE1_MDEU_INIT |
2282 DESC_HDR_MODE1_MDEU_PAD |
2283 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
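/*
 * Each single-pass IPSEC_ESP entry is typically paired with a lower
 * priority variant (TALITOS_CRA_PRIORITY_AEAD_HSNA) built on the two-pass
 * HMAC_SNOOP_NO_AFEU descriptor type, so hardware that only advertises the
 * two-pass type can still offload the algorithm.
 */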
2285 { .type = CRYPTO_ALG_TYPE_AEAD,
2286 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2289 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2290 .cra_driver_name = "authenc-hmac-sha1-"
2291 "cbc-aes-talitos-hsna",
2292 .cra_blocksize = AES_BLOCK_SIZE,
2293 .cra_flags = CRYPTO_ALG_ASYNC |
2294 CRYPTO_ALG_ALLOCATES_MEMORY,
2296 .ivsize = AES_BLOCK_SIZE,
2297 .maxauthsize = SHA1_DIGEST_SIZE,
2299 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2300 DESC_HDR_SEL0_AESU |
2301 DESC_HDR_MODE0_AESU_CBC |
2302 DESC_HDR_SEL1_MDEUA |
2303 DESC_HDR_MODE1_MDEU_INIT |
2304 DESC_HDR_MODE1_MDEU_PAD |
2305 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2307 { .type = CRYPTO_ALG_TYPE_AEAD,
2310 .cra_name = "authenc(hmac(sha1),"
2312 .cra_driver_name = "authenc-hmac-sha1-"
2314 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2315 .cra_flags = CRYPTO_ALG_ASYNC |
2316 CRYPTO_ALG_ALLOCATES_MEMORY,
2318 .ivsize = DES3_EDE_BLOCK_SIZE,
2319 .maxauthsize = SHA1_DIGEST_SIZE,
2320 .setkey = aead_des3_setkey,
2322 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2324 DESC_HDR_MODE0_DEU_CBC |
2325 DESC_HDR_MODE0_DEU_3DES |
2326 DESC_HDR_SEL1_MDEUA |
2327 DESC_HDR_MODE1_MDEU_INIT |
2328 DESC_HDR_MODE1_MDEU_PAD |
2329 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2331 { .type = CRYPTO_ALG_TYPE_AEAD,
2332 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2335 .cra_name = "authenc(hmac(sha1),"
2337 .cra_driver_name = "authenc-hmac-sha1-"
2338 "cbc-3des-talitos-hsna",
2339 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2340 .cra_flags = CRYPTO_ALG_ASYNC |
2341 CRYPTO_ALG_ALLOCATES_MEMORY,
2343 .ivsize = DES3_EDE_BLOCK_SIZE,
2344 .maxauthsize = SHA1_DIGEST_SIZE,
2345 .setkey = aead_des3_setkey,
2347 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2349 DESC_HDR_MODE0_DEU_CBC |
2350 DESC_HDR_MODE0_DEU_3DES |
2351 DESC_HDR_SEL1_MDEUA |
2352 DESC_HDR_MODE1_MDEU_INIT |
2353 DESC_HDR_MODE1_MDEU_PAD |
2354 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2356 { .type = CRYPTO_ALG_TYPE_AEAD,
2359 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2360 .cra_driver_name = "authenc-hmac-sha224-"
2362 .cra_blocksize = AES_BLOCK_SIZE,
2363 .cra_flags = CRYPTO_ALG_ASYNC |
2364 CRYPTO_ALG_ALLOCATES_MEMORY,
2366 .ivsize = AES_BLOCK_SIZE,
2367 .maxauthsize = SHA224_DIGEST_SIZE,
2369 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2370 DESC_HDR_SEL0_AESU |
2371 DESC_HDR_MODE0_AESU_CBC |
2372 DESC_HDR_SEL1_MDEUA |
2373 DESC_HDR_MODE1_MDEU_INIT |
2374 DESC_HDR_MODE1_MDEU_PAD |
2375 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2377 { .type = CRYPTO_ALG_TYPE_AEAD,
2378 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2381 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2382 .cra_driver_name = "authenc-hmac-sha224-"
2383 "cbc-aes-talitos-hsna",
2384 .cra_blocksize = AES_BLOCK_SIZE,
2385 .cra_flags = CRYPTO_ALG_ASYNC |
2386 CRYPTO_ALG_ALLOCATES_MEMORY,
2388 .ivsize = AES_BLOCK_SIZE,
2389 .maxauthsize = SHA224_DIGEST_SIZE,
2391 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2392 DESC_HDR_SEL0_AESU |
2393 DESC_HDR_MODE0_AESU_CBC |
2394 DESC_HDR_SEL1_MDEUA |
2395 DESC_HDR_MODE1_MDEU_INIT |
2396 DESC_HDR_MODE1_MDEU_PAD |
2397 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2399 { .type = CRYPTO_ALG_TYPE_AEAD,
2402 .cra_name = "authenc(hmac(sha224),"
2404 .cra_driver_name = "authenc-hmac-sha224-"
2406 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2407 .cra_flags = CRYPTO_ALG_ASYNC |
2408 CRYPTO_ALG_ALLOCATES_MEMORY,
2410 .ivsize = DES3_EDE_BLOCK_SIZE,
2411 .maxauthsize = SHA224_DIGEST_SIZE,
2412 .setkey = aead_des3_setkey,
2414 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2416 DESC_HDR_MODE0_DEU_CBC |
2417 DESC_HDR_MODE0_DEU_3DES |
2418 DESC_HDR_SEL1_MDEUA |
2419 DESC_HDR_MODE1_MDEU_INIT |
2420 DESC_HDR_MODE1_MDEU_PAD |
2421 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2423 { .type = CRYPTO_ALG_TYPE_AEAD,
2424 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2427 .cra_name = "authenc(hmac(sha224),"
2429 .cra_driver_name = "authenc-hmac-sha224-"
2430 "cbc-3des-talitos-hsna",
2431 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2432 .cra_flags = CRYPTO_ALG_ASYNC |
2433 CRYPTO_ALG_ALLOCATES_MEMORY,
2435 .ivsize = DES3_EDE_BLOCK_SIZE,
2436 .maxauthsize = SHA224_DIGEST_SIZE,
2437 .setkey = aead_des3_setkey,
2439 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2441 DESC_HDR_MODE0_DEU_CBC |
2442 DESC_HDR_MODE0_DEU_3DES |
2443 DESC_HDR_SEL1_MDEUA |
2444 DESC_HDR_MODE1_MDEU_INIT |
2445 DESC_HDR_MODE1_MDEU_PAD |
2446 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2448 { .type = CRYPTO_ALG_TYPE_AEAD,
2451 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2452 .cra_driver_name = "authenc-hmac-sha256-"
2454 .cra_blocksize = AES_BLOCK_SIZE,
2455 .cra_flags = CRYPTO_ALG_ASYNC |
2456 CRYPTO_ALG_ALLOCATES_MEMORY,
2458 .ivsize = AES_BLOCK_SIZE,
2459 .maxauthsize = SHA256_DIGEST_SIZE,
2461 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2462 DESC_HDR_SEL0_AESU |
2463 DESC_HDR_MODE0_AESU_CBC |
2464 DESC_HDR_SEL1_MDEUA |
2465 DESC_HDR_MODE1_MDEU_INIT |
2466 DESC_HDR_MODE1_MDEU_PAD |
2467 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2469 { .type = CRYPTO_ALG_TYPE_AEAD,
2470 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2473 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2474 .cra_driver_name = "authenc-hmac-sha256-"
2475 "cbc-aes-talitos-hsna",
2476 .cra_blocksize = AES_BLOCK_SIZE,
2477 .cra_flags = CRYPTO_ALG_ASYNC |
2478 CRYPTO_ALG_ALLOCATES_MEMORY,
2480 .ivsize = AES_BLOCK_SIZE,
2481 .maxauthsize = SHA256_DIGEST_SIZE,
2483 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2484 DESC_HDR_SEL0_AESU |
2485 DESC_HDR_MODE0_AESU_CBC |
2486 DESC_HDR_SEL1_MDEUA |
2487 DESC_HDR_MODE1_MDEU_INIT |
2488 DESC_HDR_MODE1_MDEU_PAD |
2489 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2491 { .type = CRYPTO_ALG_TYPE_AEAD,
2494 .cra_name = "authenc(hmac(sha256),"
2496 .cra_driver_name = "authenc-hmac-sha256-"
2498 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2499 .cra_flags = CRYPTO_ALG_ASYNC |
2500 CRYPTO_ALG_ALLOCATES_MEMORY,
2502 .ivsize = DES3_EDE_BLOCK_SIZE,
2503 .maxauthsize = SHA256_DIGEST_SIZE,
2504 .setkey = aead_des3_setkey,
2506 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2508 DESC_HDR_MODE0_DEU_CBC |
2509 DESC_HDR_MODE0_DEU_3DES |
2510 DESC_HDR_SEL1_MDEUA |
2511 DESC_HDR_MODE1_MDEU_INIT |
2512 DESC_HDR_MODE1_MDEU_PAD |
2513 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2515 { .type = CRYPTO_ALG_TYPE_AEAD,
2516 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2519 .cra_name = "authenc(hmac(sha256),"
2521 .cra_driver_name = "authenc-hmac-sha256-"
2522 "cbc-3des-talitos-hsna",
2523 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2524 .cra_flags = CRYPTO_ALG_ASYNC |
2525 CRYPTO_ALG_ALLOCATES_MEMORY,
2527 .ivsize = DES3_EDE_BLOCK_SIZE,
2528 .maxauthsize = SHA256_DIGEST_SIZE,
2529 .setkey = aead_des3_setkey,
2531 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2533 DESC_HDR_MODE0_DEU_CBC |
2534 DESC_HDR_MODE0_DEU_3DES |
2535 DESC_HDR_SEL1_MDEUA |
2536 DESC_HDR_MODE1_MDEU_INIT |
2537 DESC_HDR_MODE1_MDEU_PAD |
2538 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2540 { .type = CRYPTO_ALG_TYPE_AEAD,
2543 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2544 .cra_driver_name = "authenc-hmac-sha384-"
2546 .cra_blocksize = AES_BLOCK_SIZE,
2547 .cra_flags = CRYPTO_ALG_ASYNC |
2548 CRYPTO_ALG_ALLOCATES_MEMORY,
2550 .ivsize = AES_BLOCK_SIZE,
2551 .maxauthsize = SHA384_DIGEST_SIZE,
2553 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2554 DESC_HDR_SEL0_AESU |
2555 DESC_HDR_MODE0_AESU_CBC |
2556 DESC_HDR_SEL1_MDEUB |
2557 DESC_HDR_MODE1_MDEU_INIT |
2558 DESC_HDR_MODE1_MDEU_PAD |
2559 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2561 { .type = CRYPTO_ALG_TYPE_AEAD,
2564 .cra_name = "authenc(hmac(sha384),"
2566 .cra_driver_name = "authenc-hmac-sha384-"
2568 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2569 .cra_flags = CRYPTO_ALG_ASYNC |
2570 CRYPTO_ALG_ALLOCATES_MEMORY,
2572 .ivsize = DES3_EDE_BLOCK_SIZE,
2573 .maxauthsize = SHA384_DIGEST_SIZE,
2574 .setkey = aead_des3_setkey,
2576 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2578 DESC_HDR_MODE0_DEU_CBC |
2579 DESC_HDR_MODE0_DEU_3DES |
2580 DESC_HDR_SEL1_MDEUB |
2581 DESC_HDR_MODE1_MDEU_INIT |
2582 DESC_HDR_MODE1_MDEU_PAD |
2583 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2585 { .type = CRYPTO_ALG_TYPE_AEAD,
2588 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2589 .cra_driver_name = "authenc-hmac-sha512-"
2591 .cra_blocksize = AES_BLOCK_SIZE,
2592 .cra_flags = CRYPTO_ALG_ASYNC |
2593 CRYPTO_ALG_ALLOCATES_MEMORY,
2595 .ivsize = AES_BLOCK_SIZE,
2596 .maxauthsize = SHA512_DIGEST_SIZE,
2598 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2599 DESC_HDR_SEL0_AESU |
2600 DESC_HDR_MODE0_AESU_CBC |
2601 DESC_HDR_SEL1_MDEUB |
2602 DESC_HDR_MODE1_MDEU_INIT |
2603 DESC_HDR_MODE1_MDEU_PAD |
2604 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2606 { .type = CRYPTO_ALG_TYPE_AEAD,
2609 .cra_name = "authenc(hmac(sha512),"
2611 .cra_driver_name = "authenc-hmac-sha512-"
2613 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2614 .cra_flags = CRYPTO_ALG_ASYNC |
2615 CRYPTO_ALG_ALLOCATES_MEMORY,
2617 .ivsize = DES3_EDE_BLOCK_SIZE,
2618 .maxauthsize = SHA512_DIGEST_SIZE,
2619 .setkey = aead_des3_setkey,
2621 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2623 DESC_HDR_MODE0_DEU_CBC |
2624 DESC_HDR_MODE0_DEU_3DES |
2625 DESC_HDR_SEL1_MDEUB |
2626 DESC_HDR_MODE1_MDEU_INIT |
2627 DESC_HDR_MODE1_MDEU_PAD |
2628 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2630 { .type = CRYPTO_ALG_TYPE_AEAD,
2633 .cra_name = "authenc(hmac(md5),cbc(aes))",
2634 .cra_driver_name = "authenc-hmac-md5-"
2636 .cra_blocksize = AES_BLOCK_SIZE,
2637 .cra_flags = CRYPTO_ALG_ASYNC |
2638 CRYPTO_ALG_ALLOCATES_MEMORY,
2640 .ivsize = AES_BLOCK_SIZE,
2641 .maxauthsize = MD5_DIGEST_SIZE,
2643 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2644 DESC_HDR_SEL0_AESU |
2645 DESC_HDR_MODE0_AESU_CBC |
2646 DESC_HDR_SEL1_MDEUA |
2647 DESC_HDR_MODE1_MDEU_INIT |
2648 DESC_HDR_MODE1_MDEU_PAD |
2649 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2651 { .type = CRYPTO_ALG_TYPE_AEAD,
2652 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2655 .cra_name = "authenc(hmac(md5),cbc(aes))",
2656 .cra_driver_name = "authenc-hmac-md5-"
2657 "cbc-aes-talitos-hsna",
2658 .cra_blocksize = AES_BLOCK_SIZE,
2659 .cra_flags = CRYPTO_ALG_ASYNC |
2660 CRYPTO_ALG_ALLOCATES_MEMORY,
2662 .ivsize = AES_BLOCK_SIZE,
2663 .maxauthsize = MD5_DIGEST_SIZE,
2665 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2666 DESC_HDR_SEL0_AESU |
2667 DESC_HDR_MODE0_AESU_CBC |
2668 DESC_HDR_SEL1_MDEUA |
2669 DESC_HDR_MODE1_MDEU_INIT |
2670 DESC_HDR_MODE1_MDEU_PAD |
2671 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2673 { .type = CRYPTO_ALG_TYPE_AEAD,
2676 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2677 .cra_driver_name = "authenc-hmac-md5-"
2679 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2680 .cra_flags = CRYPTO_ALG_ASYNC |
2681 CRYPTO_ALG_ALLOCATES_MEMORY,
2683 .ivsize = DES3_EDE_BLOCK_SIZE,
2684 .maxauthsize = MD5_DIGEST_SIZE,
2685 .setkey = aead_des3_setkey,
2687 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2689 DESC_HDR_MODE0_DEU_CBC |
2690 DESC_HDR_MODE0_DEU_3DES |
2691 DESC_HDR_SEL1_MDEUA |
2692 DESC_HDR_MODE1_MDEU_INIT |
2693 DESC_HDR_MODE1_MDEU_PAD |
2694 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2696 { .type = CRYPTO_ALG_TYPE_AEAD,
2697 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2700 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2701 .cra_driver_name = "authenc-hmac-md5-"
2702 "cbc-3des-talitos-hsna",
2703 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2704 .cra_flags = CRYPTO_ALG_ASYNC |
2705 CRYPTO_ALG_ALLOCATES_MEMORY,
2707 .ivsize = DES3_EDE_BLOCK_SIZE,
2708 .maxauthsize = MD5_DIGEST_SIZE,
2709 .setkey = aead_des3_setkey,
2711 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2713 DESC_HDR_MODE0_DEU_CBC |
2714 DESC_HDR_MODE0_DEU_3DES |
2715 DESC_HDR_SEL1_MDEUA |
2716 DESC_HDR_MODE1_MDEU_INIT |
2717 DESC_HDR_MODE1_MDEU_PAD |
2718 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2720 /* SKCIPHER algorithms. */
2721 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2723 .base.cra_name = "ecb(aes)",
2724 .base.cra_driver_name = "ecb-aes-talitos",
2725 .base.cra_blocksize = AES_BLOCK_SIZE,
2726 .base.cra_flags = CRYPTO_ALG_ASYNC |
2727 CRYPTO_ALG_ALLOCATES_MEMORY,
2728 .min_keysize = AES_MIN_KEY_SIZE,
2729 .max_keysize = AES_MAX_KEY_SIZE,
2730 .setkey = skcipher_aes_setkey,
2732 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2735 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2737 .base.cra_name = "cbc(aes)",
2738 .base.cra_driver_name = "cbc-aes-talitos",
2739 .base.cra_blocksize = AES_BLOCK_SIZE,
2740 .base.cra_flags = CRYPTO_ALG_ASYNC |
2741 CRYPTO_ALG_ALLOCATES_MEMORY,
2742 .min_keysize = AES_MIN_KEY_SIZE,
2743 .max_keysize = AES_MAX_KEY_SIZE,
2744 .ivsize = AES_BLOCK_SIZE,
2745 .setkey = skcipher_aes_setkey,
2747 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2748 DESC_HDR_SEL0_AESU |
2749 DESC_HDR_MODE0_AESU_CBC,
2751 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2753 .base.cra_name = "ctr(aes)",
2754 .base.cra_driver_name = "ctr-aes-talitos",
2755 .base.cra_blocksize = 1,
2756 .base.cra_flags = CRYPTO_ALG_ASYNC |
2757 CRYPTO_ALG_ALLOCATES_MEMORY,
2758 .min_keysize = AES_MIN_KEY_SIZE,
2759 .max_keysize = AES_MAX_KEY_SIZE,
2760 .ivsize = AES_BLOCK_SIZE,
2761 .setkey = skcipher_aes_setkey,
2763 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
2764 DESC_HDR_SEL0_AESU |
2765 DESC_HDR_MODE0_AESU_CTR,
2767 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2769 .base.cra_name = "ctr(aes)",
2770 .base.cra_driver_name = "ctr-aes-talitos",
2771 .base.cra_blocksize = 1,
2772 .base.cra_flags = CRYPTO_ALG_ASYNC |
2773 CRYPTO_ALG_ALLOCATES_MEMORY,
2774 .min_keysize = AES_MIN_KEY_SIZE,
2775 .max_keysize = AES_MAX_KEY_SIZE,
2776 .ivsize = AES_BLOCK_SIZE,
2777 .setkey = skcipher_aes_setkey,
2779 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2780 DESC_HDR_SEL0_AESU |
2781 DESC_HDR_MODE0_AESU_CTR,
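/*
 * ctr(aes) is listed twice: once with the CTR-specific AESU_CTR_NONSNOOP
 * descriptor type and once with the generic non-snooping type.
 * talitos_alg_alloc() and hw_supports() together ensure that only the
 * variant this hardware can actually run gets registered.
 */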
2783 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2785 .base.cra_name = "ecb(des)",
2786 .base.cra_driver_name = "ecb-des-talitos",
2787 .base.cra_blocksize = DES_BLOCK_SIZE,
2788 .base.cra_flags = CRYPTO_ALG_ASYNC |
2789 CRYPTO_ALG_ALLOCATES_MEMORY,
2790 .min_keysize = DES_KEY_SIZE,
2791 .max_keysize = DES_KEY_SIZE,
2792 .setkey = skcipher_des_setkey,
2794 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2797 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2799 .base.cra_name = "cbc(des)",
2800 .base.cra_driver_name = "cbc-des-talitos",
2801 .base.cra_blocksize = DES_BLOCK_SIZE,
2802 .base.cra_flags = CRYPTO_ALG_ASYNC |
2803 CRYPTO_ALG_ALLOCATES_MEMORY,
2804 .min_keysize = DES_KEY_SIZE,
2805 .max_keysize = DES_KEY_SIZE,
2806 .ivsize = DES_BLOCK_SIZE,
2807 .setkey = skcipher_des_setkey,
2809 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2811 DESC_HDR_MODE0_DEU_CBC,
2813 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2815 .base.cra_name = "ecb(des3_ede)",
2816 .base.cra_driver_name = "ecb-3des-talitos",
2817 .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2818 .base.cra_flags = CRYPTO_ALG_ASYNC |
2819 CRYPTO_ALG_ALLOCATES_MEMORY,
2820 .min_keysize = DES3_EDE_KEY_SIZE,
2821 .max_keysize = DES3_EDE_KEY_SIZE,
2822 .setkey = skcipher_des3_setkey,
2824 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2826 DESC_HDR_MODE0_DEU_3DES,
2828 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2830 .base.cra_name = "cbc(des3_ede)",
2831 .base.cra_driver_name = "cbc-3des-talitos",
2832 .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2833 .base.cra_flags = CRYPTO_ALG_ASYNC |
2834 CRYPTO_ALG_ALLOCATES_MEMORY,
2835 .min_keysize = DES3_EDE_KEY_SIZE,
2836 .max_keysize = DES3_EDE_KEY_SIZE,
2837 .ivsize = DES3_EDE_BLOCK_SIZE,
2838 .setkey = skcipher_des3_setkey,
2840 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2842 DESC_HDR_MODE0_DEU_CBC |
2843 DESC_HDR_MODE0_DEU_3DES,
2845 /* AHASH algorithms. */
2846 { .type = CRYPTO_ALG_TYPE_AHASH,
2848 .halg.digestsize = MD5_DIGEST_SIZE,
2849 .halg.statesize = sizeof(struct talitos_export_state),
2852 .cra_driver_name = "md5-talitos",
2853 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
2854 .cra_flags = CRYPTO_ALG_ASYNC |
2855 CRYPTO_ALG_ALLOCATES_MEMORY,
2858 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2859 DESC_HDR_SEL0_MDEUA |
2860 DESC_HDR_MODE0_MDEU_MD5,
2862 { .type = CRYPTO_ALG_TYPE_AHASH,
2864 .halg.digestsize = SHA1_DIGEST_SIZE,
2865 .halg.statesize = sizeof(struct talitos_export_state),
2868 .cra_driver_name = "sha1-talitos",
2869 .cra_blocksize = SHA1_BLOCK_SIZE,
2870 .cra_flags = CRYPTO_ALG_ASYNC |
2871 CRYPTO_ALG_ALLOCATES_MEMORY,
2874 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2875 DESC_HDR_SEL0_MDEUA |
2876 DESC_HDR_MODE0_MDEU_SHA1,
2878 { .type = CRYPTO_ALG_TYPE_AHASH,
2880 .halg.digestsize = SHA224_DIGEST_SIZE,
2881 .halg.statesize = sizeof(struct talitos_export_state),
2883 .cra_name = "sha224",
2884 .cra_driver_name = "sha224-talitos",
2885 .cra_blocksize = SHA224_BLOCK_SIZE,
2886 .cra_flags = CRYPTO_ALG_ASYNC |
2887 CRYPTO_ALG_ALLOCATES_MEMORY,
2890 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2891 DESC_HDR_SEL0_MDEUA |
2892 DESC_HDR_MODE0_MDEU_SHA224,
2894 { .type = CRYPTO_ALG_TYPE_AHASH,
2896 .halg.digestsize = SHA256_DIGEST_SIZE,
2897 .halg.statesize = sizeof(struct talitos_export_state),
2899 .cra_name = "sha256",
2900 .cra_driver_name = "sha256-talitos",
2901 .cra_blocksize = SHA256_BLOCK_SIZE,
2902 .cra_flags = CRYPTO_ALG_ASYNC |
2903 CRYPTO_ALG_ALLOCATES_MEMORY,
2906 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2907 DESC_HDR_SEL0_MDEUA |
2908 DESC_HDR_MODE0_MDEU_SHA256,
2910 { .type = CRYPTO_ALG_TYPE_AHASH,
2912 .halg.digestsize = SHA384_DIGEST_SIZE,
2913 .halg.statesize = sizeof(struct talitos_export_state),
2915 .cra_name = "sha384",
2916 .cra_driver_name = "sha384-talitos",
2917 .cra_blocksize = SHA384_BLOCK_SIZE,
2918 .cra_flags = CRYPTO_ALG_ASYNC |
2919 CRYPTO_ALG_ALLOCATES_MEMORY,
2922 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2923 DESC_HDR_SEL0_MDEUB |
2924 DESC_HDR_MODE0_MDEUB_SHA384,
2926 { .type = CRYPTO_ALG_TYPE_AHASH,
2928 .halg.digestsize = SHA512_DIGEST_SIZE,
2929 .halg.statesize = sizeof(struct talitos_export_state),
2931 .cra_name = "sha512",
2932 .cra_driver_name = "sha512-talitos",
2933 .cra_blocksize = SHA512_BLOCK_SIZE,
2934 .cra_flags = CRYPTO_ALG_ASYNC |
2935 CRYPTO_ALG_ALLOCATES_MEMORY,
2938 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2939 DESC_HDR_SEL0_MDEUB |
2940 DESC_HDR_MODE0_MDEUB_SHA512,
2942 { .type = CRYPTO_ALG_TYPE_AHASH,
2944 .halg.digestsize = MD5_DIGEST_SIZE,
2945 .halg.statesize = sizeof(struct talitos_export_state),
2947 .cra_name = "hmac(md5)",
2948 .cra_driver_name = "hmac-md5-talitos",
2949 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
2950 .cra_flags = CRYPTO_ALG_ASYNC |
2951 CRYPTO_ALG_ALLOCATES_MEMORY,
2954 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2955 DESC_HDR_SEL0_MDEUA |
2956 DESC_HDR_MODE0_MDEU_MD5,
2958 { .type = CRYPTO_ALG_TYPE_AHASH,
2960 .halg.digestsize = SHA1_DIGEST_SIZE,
2961 .halg.statesize = sizeof(struct talitos_export_state),
2963 .cra_name = "hmac(sha1)",
2964 .cra_driver_name = "hmac-sha1-talitos",
2965 .cra_blocksize = SHA1_BLOCK_SIZE,
2966 .cra_flags = CRYPTO_ALG_ASYNC |
2967 CRYPTO_ALG_ALLOCATES_MEMORY,
2970 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2971 DESC_HDR_SEL0_MDEUA |
2972 DESC_HDR_MODE0_MDEU_SHA1,
2974 { .type = CRYPTO_ALG_TYPE_AHASH,
2976 .halg.digestsize = SHA224_DIGEST_SIZE,
2977 .halg.statesize = sizeof(struct talitos_export_state),
2979 .cra_name = "hmac(sha224)",
2980 .cra_driver_name = "hmac-sha224-talitos",
2981 .cra_blocksize = SHA224_BLOCK_SIZE,
2982 .cra_flags = CRYPTO_ALG_ASYNC |
2983 CRYPTO_ALG_ALLOCATES_MEMORY,
2986 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2987 DESC_HDR_SEL0_MDEUA |
2988 DESC_HDR_MODE0_MDEU_SHA224,
2990 { .type = CRYPTO_ALG_TYPE_AHASH,
2992 .halg.digestsize = SHA256_DIGEST_SIZE,
2993 .halg.statesize = sizeof(struct talitos_export_state),
2995 .cra_name = "hmac(sha256)",
2996 .cra_driver_name = "hmac-sha256-talitos",
2997 .cra_blocksize = SHA256_BLOCK_SIZE,
2998 .cra_flags = CRYPTO_ALG_ASYNC |
2999 CRYPTO_ALG_ALLOCATES_MEMORY,
3002 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3003 DESC_HDR_SEL0_MDEUA |
3004 DESC_HDR_MODE0_MDEU_SHA256,
3006 { .type = CRYPTO_ALG_TYPE_AHASH,
3008 .halg.digestsize = SHA384_DIGEST_SIZE,
3009 .halg.statesize = sizeof(struct talitos_export_state),
3011 .cra_name = "hmac(sha384)",
3012 .cra_driver_name = "hmac-sha384-talitos",
3013 .cra_blocksize = SHA384_BLOCK_SIZE,
3014 .cra_flags = CRYPTO_ALG_ASYNC |
3015 CRYPTO_ALG_ALLOCATES_MEMORY,
3018 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3019 DESC_HDR_SEL0_MDEUB |
3020 DESC_HDR_MODE0_MDEUB_SHA384,
3022 { .type = CRYPTO_ALG_TYPE_AHASH,
3024 .halg.digestsize = SHA512_DIGEST_SIZE,
3025 .halg.statesize = sizeof(struct talitos_export_state),
3027 .cra_name = "hmac(sha512)",
3028 .cra_driver_name = "hmac-sha512-talitos",
3029 .cra_blocksize = SHA512_BLOCK_SIZE,
3030 .cra_flags = CRYPTO_ALG_ASYNC |
3031 CRYPTO_ALG_ALLOCATES_MEMORY,
3034 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3035 DESC_HDR_SEL0_MDEUB |
3036 DESC_HDR_MODE0_MDEUB_SHA512,
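/*
 * A registered algorithm instance: one template copied from driver_algs[]
 * plus the owning device, linked into the per-device alg_list so
 * talitos_remove() can unregister it again.
 */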
3040 struct talitos_crypto_alg {
3041 struct list_head entry;
3043 struct talitos_alg_template algt;
3046 static int talitos_init_common(struct talitos_ctx *ctx,
3047 struct talitos_crypto_alg *talitos_alg)
3049 struct talitos_private *priv;
3051 /* update context with ptr to dev */
3052 ctx->dev = talitos_alg->dev;
3054 /* assign SEC channel to tfm in round-robin fashion */
3055 priv = dev_get_drvdata(ctx->dev);
3056 ctx->ch = atomic_inc_return(&priv->last_chan) &
3057 (priv->num_channels - 1);
3059 /* copy descriptor header template value */
3060 ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
3062 /* select done notification */
3063 ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
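/*
 * The cra_init helpers below recover the enclosing talitos_crypto_alg from
 * the generic algorithm via container_of() and hand the tfm context to
 * talitos_init_common().
 */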
3068 static int talitos_cra_init_aead(struct crypto_aead *tfm)
3070 struct aead_alg *alg = crypto_aead_alg(tfm);
3071 struct talitos_crypto_alg *talitos_alg;
3072 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3074 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3077 return talitos_init_common(ctx, talitos_alg);
3080 static int talitos_cra_init_skcipher(struct crypto_skcipher *tfm)
3082 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3083 struct talitos_crypto_alg *talitos_alg;
3084 struct talitos_ctx *ctx = crypto_skcipher_ctx(tfm);
3086 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3089 return talitos_init_common(ctx, talitos_alg);
3092 static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3094 struct crypto_alg *alg = tfm->__crt_alg;
3095 struct talitos_crypto_alg *talitos_alg;
3096 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3098 talitos_alg = container_of(__crypto_ahash_alg(alg),
3099 struct talitos_crypto_alg,
3103 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3104 sizeof(struct talitos_ahash_req_ctx));
3106 return talitos_init_common(ctx, talitos_alg);
3109 static void talitos_cra_exit(struct crypto_tfm *tfm)
3111 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3112 struct device *dev = ctx->dev;
3115 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3119 * given the alg's descriptor header template, determine whether descriptor
3120 * type and primary/secondary execution units required match the hw
3121 * capabilities description provided in the device tree node.
3123 static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3125 struct talitos_private *priv = dev_get_drvdata(dev);
3128 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3129 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3131 if (SECONDARY_EU(desc_hdr_template))
3132 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3133 & priv->exec_units);
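/*
 * talitos_remove(): tear down in roughly reverse probe order - unregister
 * every algorithm on alg_list, unregister the RNG if present, free and
 * dispose of the IRQ mappings, and kill the done tasklets.
 */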
3138 static int talitos_remove(struct platform_device *ofdev)
3140 struct device *dev = &ofdev->dev;
3141 struct talitos_private *priv = dev_get_drvdata(dev);
3142 struct talitos_crypto_alg *t_alg, *n;
3145 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
3146 switch (t_alg->algt.type) {
3147 case CRYPTO_ALG_TYPE_SKCIPHER:
3148 crypto_unregister_skcipher(&t_alg->algt.alg.skcipher);
3150 case CRYPTO_ALG_TYPE_AEAD:
3151 crypto_unregister_aead(&t_alg->algt.alg.aead);
3153 case CRYPTO_ALG_TYPE_AHASH:
3154 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3157 list_del(&t_alg->entry);
3160 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3161 talitos_unregister_rng(dev);
3163 for (i = 0; i < 2; i++)
3165 free_irq(priv->irq[i], dev);
3166 irq_dispose_mapping(priv->irq[i]);
3169 tasklet_kill(&priv->done_task[0]);
3171 tasklet_kill(&priv->done_task[1]);
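/*
 * talitos_alg_alloc(): copy one template, wire up the type-specific
 * init/setkey/encrypt/decrypt (or hash) callbacks, reject combinations
 * this hardware cannot run (-ENOTSUPP), and fill in the common cra_*
 * fields (priority, alignmask, ctxsize, CRYPTO_ALG_KERN_DRIVER_ONLY).
 */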
3176 static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3177 struct talitos_alg_template
3180 struct talitos_private *priv = dev_get_drvdata(dev);
3181 struct talitos_crypto_alg *t_alg;
3182 struct crypto_alg *alg;
3184 t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
3187 return ERR_PTR(-ENOMEM);
3189 t_alg->algt = *template;
3191 switch (t_alg->algt.type) {
3192 case CRYPTO_ALG_TYPE_SKCIPHER:
3193 alg = &t_alg->algt.alg.skcipher.base;
3194 alg->cra_exit = talitos_cra_exit;
3195 t_alg->algt.alg.skcipher.init = talitos_cra_init_skcipher;
3196 t_alg->algt.alg.skcipher.setkey =
3197 t_alg->algt.alg.skcipher.setkey ?: skcipher_setkey;
3198 t_alg->algt.alg.skcipher.encrypt = skcipher_encrypt;
3199 t_alg->algt.alg.skcipher.decrypt = skcipher_decrypt;
3200 if (!strcmp(alg->cra_name, "ctr(aes)") && !has_ftr_sec1(priv) &&
3201 DESC_TYPE(t_alg->algt.desc_hdr_template) !=
3202 DESC_TYPE(DESC_HDR_TYPE_AESU_CTR_NONSNOOP)) {
3203 devm_kfree(dev, t_alg);
3204 return ERR_PTR(-ENOTSUPP);
3207 case CRYPTO_ALG_TYPE_AEAD:
3208 alg = &t_alg->algt.alg.aead.base;
3209 alg->cra_exit = talitos_cra_exit;
3210 t_alg->algt.alg.aead.init = talitos_cra_init_aead;
3211 t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
3213 t_alg->algt.alg.aead.encrypt = aead_encrypt;
3214 t_alg->algt.alg.aead.decrypt = aead_decrypt;
3215 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3216 !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
3217 devm_kfree(dev, t_alg);
3218 return ERR_PTR(-ENOTSUPP);
3221 case CRYPTO_ALG_TYPE_AHASH:
3222 alg = &t_alg->algt.alg.hash.halg.base;
3223 alg->cra_init = talitos_cra_init_ahash;
3224 alg->cra_exit = talitos_cra_exit;
3225 t_alg->algt.alg.hash.init = ahash_init;
3226 t_alg->algt.alg.hash.update = ahash_update;
3227 t_alg->algt.alg.hash.final = ahash_final;
3228 t_alg->algt.alg.hash.finup = ahash_finup;
3229 t_alg->algt.alg.hash.digest = ahash_digest;
3230 if (!strncmp(alg->cra_name, "hmac", 4))
3231 t_alg->algt.alg.hash.setkey = ahash_setkey;
3232 t_alg->algt.alg.hash.import = ahash_import;
3233 t_alg->algt.alg.hash.export = ahash_export;
3235 if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
3236 !strncmp(alg->cra_name, "hmac", 4)) {
3237 devm_kfree(dev, t_alg);
3238 return ERR_PTR(-ENOTSUPP);
3240 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3241 (!strcmp(alg->cra_name, "sha224") ||
3242 !strcmp(alg->cra_name, "hmac(sha224)"))) {
3243 t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
3244 t_alg->algt.desc_hdr_template =
3245 DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3246 DESC_HDR_SEL0_MDEUA |
3247 DESC_HDR_MODE0_MDEU_SHA256;
3251 dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
3252 devm_kfree(dev, t_alg);
3253 return ERR_PTR(-EINVAL);
3256 alg->cra_module = THIS_MODULE;
3257 if (t_alg->algt.priority)
3258 alg->cra_priority = t_alg->algt.priority;
3260 alg->cra_priority = TALITOS_CRA_PRIORITY;
3261 if (has_ftr_sec1(priv))
3262 alg->cra_alignmask = 3;
3264 alg->cra_alignmask = 0;
3265 alg->cra_ctxsize = sizeof(struct talitos_ctx);
3266 alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
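/*
 * IRQ setup: SEC1 devices use a single interrupt line, while SEC2+ devices
 * provide either one combined line for all channels or two lines, one per
 * channel pair (ch0/2 and ch1/3).
 */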
3273 static int talitos_probe_irq(struct platform_device *ofdev)
3275 struct device *dev = &ofdev->dev;
3276 struct device_node *np = ofdev->dev.of_node;
3277 struct talitos_private *priv = dev_get_drvdata(dev);
3279 bool is_sec1 = has_ftr_sec1(priv);
3281 priv->irq[0] = irq_of_parse_and_map(np, 0);
3282 if (!priv->irq[0]) {
3283 dev_err(dev, "failed to map irq\n");
3287 err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
3288 dev_driver_string(dev), dev);
3292 priv->irq[1] = irq_of_parse_and_map(np, 1);
3294 /* get the primary irq line */
3295 if (!priv->irq[1]) {
3296 err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
3297 dev_driver_string(dev), dev);
3301 err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
3302 dev_driver_string(dev), dev);
3306 /* get the secondary irq line */
3307 err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
3308 dev_driver_string(dev), dev);
3310 dev_err(dev, "failed to request secondary irq\n");
3311 irq_dispose_mapping(priv->irq[1]);
3319 dev_err(dev, "failed to request primary irq\n");
3320 irq_dispose_mapping(priv->irq[0]);
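/*
 * talitos_probe(): map the register block, read the SEC capabilities from
 * the device tree, select per-version register offsets and channel stride,
 * request interrupts, set up the done tasklets, allocate the per-channel
 * request fifos, reset the hardware, then register the RNG and every
 * algorithm that hw_supports() accepts.
 */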
3327 static int talitos_probe(struct platform_device *ofdev)
3329 struct device *dev = &ofdev->dev;
3330 struct device_node *np = ofdev->dev.of_node;
3331 struct talitos_private *priv;
3334 struct resource *res;
3336 priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
3340 INIT_LIST_HEAD(&priv->alg_list);
3342 dev_set_drvdata(dev, priv);
3344 priv->ofdev = ofdev;
3346 spin_lock_init(&priv->reg_lock);
3348 res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3351 priv->reg = devm_ioremap(dev, res->start, resource_size(res));
3353 dev_err(dev, "failed to ioremap registers\n");
3358 /* get SEC version capabilities from device tree */
3359 of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3360 of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3361 of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3362 of_property_read_u32(np, "fsl,descriptor-types-mask",
3365 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3366 !priv->exec_units || !priv->desc_types) {
3367 dev_err(dev, "invalid property data in device tree node\n");
3372 if (of_device_is_compatible(np, "fsl,sec3.0"))
3373 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3375 if (of_device_is_compatible(np, "fsl,sec2.1"))
3376 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
3377 TALITOS_FTR_SHA224_HWINIT |
3378 TALITOS_FTR_HMAC_OK;
3380 if (of_device_is_compatible(np, "fsl,sec1.0"))
3381 priv->features |= TALITOS_FTR_SEC1;
3383 if (of_device_is_compatible(np, "fsl,sec1.2")) {
3384 priv->reg_deu = priv->reg + TALITOS12_DEU;
3385 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3386 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3387 stride = TALITOS1_CH_STRIDE;
3388 } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3389 priv->reg_deu = priv->reg + TALITOS10_DEU;
3390 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3391 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3392 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3393 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3394 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3395 stride = TALITOS1_CH_STRIDE;
3397 priv->reg_deu = priv->reg + TALITOS2_DEU;
3398 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3399 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3400 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3401 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3402 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3403 priv->reg_keu = priv->reg + TALITOS2_KEU;
3404 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3405 stride = TALITOS2_CH_STRIDE;
3408 err = talitos_probe_irq(ofdev);
3412 if (has_ftr_sec1(priv)) {
3413 if (priv->num_channels == 1)
3414 tasklet_init(&priv->done_task[0], talitos1_done_ch0,
3415 (unsigned long)dev);
3417 tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3418 (unsigned long)dev);
3421 tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3422 (unsigned long)dev);
3423 tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3424 (unsigned long)dev);
3425 } else if (priv->num_channels == 1) {
3426 tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3427 (unsigned long)dev);
3429 tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3430 (unsigned long)dev);
3434 priv->chan = devm_kcalloc(dev,
3436 sizeof(struct talitos_channel),
3439 dev_err(dev, "failed to allocate channel management space\n");
3444 priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3446 for (i = 0; i < priv->num_channels; i++) {
3447 priv->chan[i].reg = priv->reg + stride * (i + 1);
3448 if (!priv->irq[1] || !(i & 1))
3449 priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
3451 spin_lock_init(&priv->chan[i].head_lock);
3452 spin_lock_init(&priv->chan[i].tail_lock);
3454 priv->chan[i].fifo = devm_kcalloc(dev,
3456 sizeof(struct talitos_request),
3458 if (!priv->chan[i].fifo) {
3459 dev_err(dev, "failed to allocate request fifo %d\n", i);
3464 atomic_set(&priv->chan[i].submit_count,
3465 -(priv->chfifo_len - 1));
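/*
 * submit_count starts at -(chfifo_len - 1) so the submit path can detect
 * a full channel fifo with a single atomic increment-and-test.
 */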
3468 dma_set_mask(dev, DMA_BIT_MASK(36));
3470 /* reset and initialize the h/w */
3471 err = init_device(dev);
3473 dev_err(dev, "failed to initialize device\n");
3477 /* register the RNG, if available */
3478 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3479 err = talitos_register_rng(dev);
3481 dev_err(dev, "failed to register hwrng: %d\n", err);
3484 dev_info(dev, "hwrng\n");
3487 /* register crypto algorithms the device supports */
3488 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3489 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3490 struct talitos_crypto_alg *t_alg;
3491 struct crypto_alg *alg = NULL;
3493 t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3494 if (IS_ERR(t_alg)) {
3495 err = PTR_ERR(t_alg);
3496 if (err == -ENOTSUPP)
3501 switch (t_alg->algt.type) {
3502 case CRYPTO_ALG_TYPE_SKCIPHER:
3503 err = crypto_register_skcipher(
3504 &t_alg->algt.alg.skcipher);
3505 alg = &t_alg->algt.alg.skcipher.base;
3508 case CRYPTO_ALG_TYPE_AEAD:
3509 err = crypto_register_aead(
3510 &t_alg->algt.alg.aead);
3511 alg = &t_alg->algt.alg.aead.base;
3514 case CRYPTO_ALG_TYPE_AHASH:
3515 err = crypto_register_ahash(
3516 &t_alg->algt.alg.hash);
3517 alg = &t_alg->algt.alg.hash.halg.base;
3521 dev_err(dev, "%s alg registration failed\n",
3522 alg->cra_driver_name);
3523 devm_kfree(dev, t_alg);
3525 list_add_tail(&t_alg->entry, &priv->alg_list);
3528 if (!list_empty(&priv->alg_list))
3529 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3530 (char *)of_get_property(np, "compatible", NULL));
3535 talitos_remove(ofdev);
3540 static const struct of_device_id talitos_match[] = {
3541 #ifdef CONFIG_CRYPTO_DEV_TALITOS1
3543 .compatible = "fsl,sec1.0",
3546 #ifdef CONFIG_CRYPTO_DEV_TALITOS2
3548 .compatible = "fsl,sec2.0",
3553 MODULE_DEVICE_TABLE(of, talitos_match);
3555 static struct platform_driver talitos_driver = {
3558 .of_match_table = talitos_match,
3560 .probe = talitos_probe,
3561 .remove = talitos_remove,
3564 module_platform_driver(talitos_driver);
3566 MODULE_LICENSE("GPL");
3567 MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3568 MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");