1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2011 Marvell International Ltd. All rights reserved.
4  */
5
6 #include <linux/module.h>
7 #include <linux/dma-mapping.h>
8 #include <linux/dmapool.h>
9 #include <linux/kernel.h>
10 #include <linux/delay.h>
11 #include <linux/ioport.h>
12 #include <linux/sched.h>
13 #include <linux/slab.h>
14 #include <linux/errno.h>
15 #include <linux/timer.h>
16 #include <linux/list.h>
17 #include <linux/notifier.h>
18 #include <linux/interrupt.h>
19 #include <linux/moduleparam.h>
20 #include <linux/device.h>
21 #include <linux/usb/ch9.h>
22 #include <linux/usb/gadget.h>
23 #include <linux/pm.h>
24 #include <linux/io.h>
25 #include <linux/irq.h>
26 #include <linux/platform_device.h>
27 #include <linux/platform_data/mv_usb.h>
28 #include <linux/clk.h>
29
30 #include "mv_u3d.h"
31
32 #define DRIVER_DESC             "Marvell PXA USB3.0 Device Controller driver"
33
34 static const char driver_name[] = "mv_u3d";
35
36 static void mv_u3d_nuke(struct mv_u3d_ep *ep, int status);
37 static void mv_u3d_stop_activity(struct mv_u3d *u3d,
38                         struct usb_gadget_driver *driver);
39
40 /* for endpoint 0 operations */
41 static const struct usb_endpoint_descriptor mv_u3d_ep0_desc = {
42         .bLength =              USB_DT_ENDPOINT_SIZE,
43         .bDescriptorType =      USB_DT_ENDPOINT,
44         .bEndpointAddress =     0,
45         .bmAttributes =         USB_ENDPOINT_XFER_CONTROL,
46         .wMaxPacketSize =       MV_U3D_EP0_MAX_PKT_SIZE,
47 };
48
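/*
 * mv_u3d_ep0_reset() - re-initialize endpoint 0 after a controller reset.
 * ep0 IN and OUT share ep_context[1]; each direction is pulsed through
 * EP_INIT and then re-programmed as a control endpoint with the ep0 max
 * packet size and a burst size of one.
 */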
49 static void mv_u3d_ep0_reset(struct mv_u3d *u3d)
50 {
51         struct mv_u3d_ep *ep;
52         u32 epxcr;
53         int i;
54
55         for (i = 0; i < 2; i++) {
56                 ep = &u3d->eps[i];
57                 ep->u3d = u3d;
58
59                 /* ep0 ep context, ep0 in and out share the same ep context */
60                 ep->ep_context = &u3d->ep_context[1];
61         }
62
63         /* reset ep state machine */
64         /* reset ep0 out */
65         epxcr = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
66         epxcr |= MV_U3D_EPXCR_EP_INIT;
67         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr0);
68         udelay(5);
69         epxcr &= ~MV_U3D_EPXCR_EP_INIT;
70         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr0);
71
72         epxcr = ((MV_U3D_EP0_MAX_PKT_SIZE
73                 << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
74                 | (1 << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
75                 | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
76                 | MV_U3D_EPXCR_EP_TYPE_CONTROL);
77         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr1);
78
79         /* reset ep0 in */
80         epxcr = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
81         epxcr |= MV_U3D_EPXCR_EP_INIT;
82         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr0);
83         udelay(5);
84         epxcr &= ~MV_U3D_EPXCR_EP_INIT;
85         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr0);
86
87         epxcr = ((MV_U3D_EP0_MAX_PKT_SIZE
88                 << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
89                 | (1 << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
90                 | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
91                 | MV_U3D_EPXCR_EP_TYPE_CONTROL);
92         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr1);
93 }
94
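/*
 * mv_u3d_ep0_stall() - halt both directions of endpoint 0 and rewind the
 * ep0 state machine so the controller waits for the next SETUP packet.
 */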
95 static void mv_u3d_ep0_stall(struct mv_u3d *u3d)
96 {
97         u32 tmp;
98         dev_dbg(u3d->dev, "%s\n", __func__);
99
100         /* set TX and RX to stall */
101         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
102         tmp |= MV_U3D_EPXCR_EP_HALT;
103         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
104
105         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
106         tmp |= MV_U3D_EPXCR_EP_HALT;
107         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
108
109         /* update ep0 state */
110         u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
111         u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
112 }
113
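/*
 * mv_u3d_process_ep_req() - retire the TRBs of a completed request.
 * Walks the request's TRB list, checks the hardware own bit and the
 * per-endpoint transfer status registers, and updates the number of
 * bytes actually transferred for the request.
 */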
114 static int mv_u3d_process_ep_req(struct mv_u3d *u3d, int index,
115         struct mv_u3d_req *curr_req)
116 {
117         struct mv_u3d_trb       *curr_trb;
118         int actual, remaining_length = 0;
119         int direction, ep_num;
120         int retval = 0;
121         u32 tmp, status, length;
122
123         direction = index % 2;
124         ep_num = index / 2;
125
126         actual = curr_req->req.length;
127
128         while (!list_empty(&curr_req->trb_list)) {
129                 curr_trb = list_entry(curr_req->trb_list.next,
130                                         struct mv_u3d_trb, trb_list);
131                 if (!curr_trb->trb_hw->ctrl.own) {
132                         dev_err(u3d->dev, "%s, TRB own error!\n",
133                                 u3d->eps[index].name);
134                         return 1;
135                 }
136
137                 curr_trb->trb_hw->ctrl.own = 0;
138                 if (direction == MV_U3D_EP_DIR_OUT)
139                         tmp = ioread32(&u3d->vuc_regs->rxst[ep_num].statuslo);
140                 else
141                         tmp = ioread32(&u3d->vuc_regs->txst[ep_num].statuslo);
142
143                 status = tmp >> MV_U3D_XFERSTATUS_COMPLETE_SHIFT;
144                 length = tmp & MV_U3D_XFERSTATUS_TRB_LENGTH_MASK;
145
146                 if (status == MV_U3D_COMPLETE_SUCCESS ||
147                         (status == MV_U3D_COMPLETE_SHORT_PACKET &&
148                         direction == MV_U3D_EP_DIR_OUT)) {
149                         remaining_length += length;
150                         actual -= remaining_length;
151                 } else {
152                         dev_err(u3d->dev,
153                                 "complete_tr error: ep=%d %s: error = 0x%x\n",
154                                 index >> 1, direction ? "SEND" : "RECV",
155                                 status);
156                         retval = -EPROTO;
157                 }
158
159                 list_del_init(&curr_trb->trb_list);
160         }
161         if (retval)
162                 return retval;
163
164         curr_req->req.actual = actual;
165         return 0;
166 }
167
168 /*
169  * mv_u3d_done() - retire a request; the caller has blocked irqs
170  * @status : request status to be set; it only takes effect while the
171  * request is still in progress.
172  */
173 static
174 void mv_u3d_done(struct mv_u3d_ep *ep, struct mv_u3d_req *req, int status)
175         __releases(&ep->udc->lock)
176         __acquires(&ep->udc->lock)
177 {
178         struct mv_u3d *u3d = (struct mv_u3d *)ep->u3d;
179
180         dev_dbg(u3d->dev, "mv_u3d_done: remove req->queue\n");
181         /* Remove the req from the ep queue */
182         list_del_init(&req->queue);
183
184         /* req.status should be set as -EINPROGRESS in ep_queue() */
185         if (req->req.status == -EINPROGRESS)
186                 req->req.status = status;
187         else
188                 status = req->req.status;
189
190         /* Free trb for the request */
191         if (!req->chain)
192                 dma_pool_free(u3d->trb_pool,
193                         req->trb_head->trb_hw, req->trb_head->trb_dma);
194         else {
195                 dma_unmap_single(ep->u3d->gadget.dev.parent,
196                         (dma_addr_t)req->trb_head->trb_dma,
197                         req->trb_count * sizeof(struct mv_u3d_trb_hw),
198                         DMA_BIDIRECTIONAL);
199                 kfree(req->trb_head->trb_hw);
200         }
201         kfree(req->trb_head);
202
203         usb_gadget_unmap_request(&u3d->gadget, &req->req, mv_u3d_ep_dir(ep));
204
205         if (status && (status != -ESHUTDOWN)) {
206                 dev_dbg(u3d->dev, "complete %s req %p stat %d len %u/%u",
207                         ep->ep.name, &req->req, status,
208                         req->req.actual, req->req.length);
209         }
210
211         spin_unlock(&ep->u3d->lock);
212
213         usb_gadget_giveback_request(&ep->ep, &req->req);
214
215         spin_lock(&ep->u3d->lock);
216 }
217
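/*
 * mv_u3d_queue_trb() - hand a request's TRB chain to the hardware.
 * Writes the first TRB address (with the DCS bit) into the endpoint
 * context and rings the doorbell; only one request may be active on an
 * endpoint at a time.
 */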
218 static int mv_u3d_queue_trb(struct mv_u3d_ep *ep, struct mv_u3d_req *req)
219 {
220         u32 tmp, direction;
221         struct mv_u3d *u3d;
222         struct mv_u3d_ep_context *ep_context;
223         int retval = 0;
224
225         u3d = ep->u3d;
226         direction = mv_u3d_ep_dir(ep);
227
228         /* ep0 in and out share the same ep context slot 1 */
229         if (ep->ep_num == 0)
230                 ep_context = &(u3d->ep_context[1]);
231         else
232                 ep_context = &(u3d->ep_context[ep->ep_num * 2 + direction]);
233
234         /* check if the pipe is empty or not */
235         if (!list_empty(&ep->queue)) {
236                 dev_err(u3d->dev, "add trb to non-empty queue!\n");
237                 retval = -ENOMEM;
238                 WARN_ON(1);
239         } else {
240                 ep_context->rsvd0 = cpu_to_le32(1);
241                 ep_context->rsvd1 = 0;
242
243                 /* Configure the trb address and set the DCS bit.
244                  * Both DCS bit and own bit in trb should be set.
245                  */
246                 ep_context->trb_addr_lo =
247                         cpu_to_le32(req->trb_head->trb_dma | DCS_ENABLE);
248                 ep_context->trb_addr_hi = 0;
249
250                 /* Ensure that updates to the EP Context will
251                  * occur before ringing the doorbell.
252                  */
253                 wmb();
254
255                 /* ring the doorbell for the ep */
256                 if (ep->ep_num == 0)
257                         tmp = 0x1;
258                 else
259                         tmp = ep->ep_num * 2
260                                 + ((direction == MV_U3D_EP_DIR_OUT) ? 0 : 1);
261
262                 iowrite32(tmp, &u3d->op_regs->doorbell);
263         }
264         return retval;
265 }
266
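/*
 * mv_u3d_build_trb_one() - build a single TRB, allocated from the TRB DMA
 * pool, that covers the whole remaining transfer. Used when the request
 * fits in one TRB (up to MV_U3D_EP_MAX_LENGTH_TRANSFER bytes).
 */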
267 static struct mv_u3d_trb *mv_u3d_build_trb_one(struct mv_u3d_req *req,
268                                 unsigned *length, dma_addr_t *dma)
269 {
270         u32 temp;
271         unsigned int direction;
272         struct mv_u3d_trb *trb;
273         struct mv_u3d_trb_hw *trb_hw;
274         struct mv_u3d *u3d;
275
276         /* how big will this transfer be? */
277         *length = req->req.length - req->req.actual;
278         BUG_ON(*length > (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER);
279
280         u3d = req->ep->u3d;
281
282         trb = kzalloc(sizeof(*trb), GFP_ATOMIC);
283         if (!trb)
284                 return NULL;
285
286         /*
287          * Be careful that no __GFP_HIGHMEM is set,
288          * or we cannot use dma_to_virt;
289          * GFP_KERNEL cannot be used while holding a spinlock.
290          */
291         trb_hw = dma_pool_alloc(u3d->trb_pool, GFP_ATOMIC, dma);
292         if (!trb_hw) {
293                 kfree(trb);
294                 dev_err(u3d->dev,
295                         "%s, dma_pool_alloc fail\n", __func__);
296                 return NULL;
297         }
298         trb->trb_dma = *dma;
299         trb->trb_hw = trb_hw;
300
301         /* initialize buffer page pointers */
302         temp = (u32)(req->req.dma + req->req.actual);
303
304         trb_hw->buf_addr_lo = cpu_to_le32(temp);
305         trb_hw->buf_addr_hi = 0;
306         trb_hw->trb_len = cpu_to_le32(*length);
307         trb_hw->ctrl.own = 1;
308
309         if (req->ep->ep_num == 0)
310                 trb_hw->ctrl.type = TYPE_DATA;
311         else
312                 trb_hw->ctrl.type = TYPE_NORMAL;
313
314         req->req.actual += *length;
315
316         direction = mv_u3d_ep_dir(req->ep);
317         if (direction == MV_U3D_EP_DIR_IN)
318                 trb_hw->ctrl.dir = 1;
319         else
320                 trb_hw->ctrl.dir = 0;
321
322         /* Enable interrupt for the last trb of a request */
323         if (!req->req.no_interrupt)
324                 trb_hw->ctrl.ioc = 1;
325
326         trb_hw->ctrl.chain = 0;
327
328         wmb();
329         return trb;
330 }
331
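/*
 * mv_u3d_build_trb_chain() - fill in one pre-allocated TRB of a chained
 * transfer and report through *is_last whether it is the final TRB of the
 * request (taking a trailing zero-length packet into account).
 */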
332 static int mv_u3d_build_trb_chain(struct mv_u3d_req *req, unsigned *length,
333                 struct mv_u3d_trb *trb, int *is_last)
334 {
335         u32 temp;
336         unsigned int direction;
337         struct mv_u3d *u3d;
338
339         /* how big will this transfer be? */
340         *length = min(req->req.length - req->req.actual,
341                         (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER);
342
343         u3d = req->ep->u3d;
344
345         trb->trb_dma = 0;
346
347         /* initialize buffer page pointers */
348         temp = (u32)(req->req.dma + req->req.actual);
349
350         trb->trb_hw->buf_addr_lo = cpu_to_le32(temp);
351         trb->trb_hw->buf_addr_hi = 0;
352         trb->trb_hw->trb_len = cpu_to_le32(*length);
353         trb->trb_hw->ctrl.own = 1;
354
355         if (req->ep->ep_num == 0)
356                 trb->trb_hw->ctrl.type = TYPE_DATA;
357         else
358                 trb->trb_hw->ctrl.type = TYPE_NORMAL;
359
360         req->req.actual += *length;
361
362         direction = mv_u3d_ep_dir(req->ep);
363         if (direction == MV_U3D_EP_DIR_IN)
364                 trb->trb_hw->ctrl.dir = 1;
365         else
366                 trb->trb_hw->ctrl.dir = 0;
367
368         /* zlp is needed if req->req.zero is set */
369         if (req->req.zero) {
370                 if (*length == 0 || (*length % req->ep->ep.maxpacket) != 0)
371                         *is_last = 1;
372                 else
373                         *is_last = 0;
374         } else if (req->req.length == req->req.actual)
375                 *is_last = 1;
376         else
377                 *is_last = 0;
378
379         /* Enable interrupt for the last trb of a request */
380         if (*is_last && !req->req.no_interrupt)
381                 trb->trb_hw->ctrl.ioc = 1;
382
383         if (*is_last)
384                 trb->trb_hw->ctrl.chain = 0;
385         else {
386                 trb->trb_hw->ctrl.chain = 1;
387                 dev_dbg(u3d->dev, "chain trb\n");
388         }
389
390         wmb();
391
392         return 0;
393 }
394
395 /* generate the TRB linked list for a request
396  * The usb controller only supports a contiguous trb chain,
397  * i.e. the trb structures' physical addresses must be contiguous.
398  */
399 static int mv_u3d_req_to_trb(struct mv_u3d_req *req)
400 {
401         unsigned count;
402         int is_last;
403         struct mv_u3d_trb *trb;
404         struct mv_u3d_trb_hw *trb_hw;
405         struct mv_u3d *u3d;
406         dma_addr_t dma;
407         unsigned length;
408         unsigned trb_num;
409
410         u3d = req->ep->u3d;
411
412         INIT_LIST_HEAD(&req->trb_list);
413
414         length = req->req.length - req->req.actual;
415         /* normally the request transfer length is less than 16KB.
416          * we use build_trb_one() to optimize it.
417          */
418         if (length <= (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER) {
419                 trb = mv_u3d_build_trb_one(req, &count, &dma);
420                 list_add_tail(&trb->trb_list, &req->trb_list);
421                 req->trb_head = trb;
422                 req->trb_count = 1;
423                 req->chain = 0;
424         } else {
425                 trb_num = length / MV_U3D_EP_MAX_LENGTH_TRANSFER;
426                 if (length % MV_U3D_EP_MAX_LENGTH_TRANSFER)
427                         trb_num++;
428
429                 trb = kcalloc(trb_num, sizeof(*trb), GFP_ATOMIC);
430                 if (!trb)
431                         return -ENOMEM;
432
433                 trb_hw = kcalloc(trb_num, sizeof(*trb_hw), GFP_ATOMIC);
434                 if (!trb_hw) {
435                         kfree(trb);
436                         return -ENOMEM;
437                 }
438
439                 do {
440                         trb->trb_hw = trb_hw;
441                         if (mv_u3d_build_trb_chain(req, &count,
442                                                 trb, &is_last)) {
443                                 dev_err(u3d->dev,
444                                         "%s, mv_u3d_build_trb_chain fail\n",
445                                         __func__);
446                                 return -EIO;
447                         }
448
449                         list_add_tail(&trb->trb_list, &req->trb_list);
450                         req->trb_count++;
451                         trb++;
452                         trb_hw++;
453                 } while (!is_last);
454
455                 req->trb_head = list_entry(req->trb_list.next,
456                                         struct mv_u3d_trb, trb_list);
457                 req->trb_head->trb_dma = dma_map_single(u3d->gadget.dev.parent,
458                                         req->trb_head->trb_hw,
459                                         trb_num * sizeof(*trb_hw),
460                                         DMA_BIDIRECTIONAL);
461                 if (dma_mapping_error(u3d->gadget.dev.parent,
462                                         req->trb_head->trb_dma)) {
463                         kfree(req->trb_head->trb_hw);
464                         kfree(req->trb_head);
465                         return -EFAULT;
466                 }
467
468                 req->chain = 1;
469         }
470
471         return 0;
472 }
473
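/*
 * mv_u3d_start_queue() - start the next request on an endpoint.
 * Takes the first request from the endpoint's software req_list, maps it
 * for DMA, converts it to a TRB chain and queues it to the hardware.
 */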
474 static int
475 mv_u3d_start_queue(struct mv_u3d_ep *ep)
476 {
477         struct mv_u3d *u3d = ep->u3d;
478         struct mv_u3d_req *req;
479         int ret;
480
481         if (!list_empty(&ep->req_list) && !ep->processing)
482                 req = list_entry(ep->req_list.next, struct mv_u3d_req, list);
483         else
484                 return 0;
485
486         ep->processing = 1;
487
488         /* set up dma mapping */
489         ret = usb_gadget_map_request(&u3d->gadget, &req->req,
490                                         mv_u3d_ep_dir(ep));
491         if (ret)
492                 goto break_processing;
493
494         req->req.status = -EINPROGRESS;
495         req->req.actual = 0;
496         req->trb_count = 0;
497
498         /* build trbs */
499         ret = mv_u3d_req_to_trb(req);
500         if (ret) {
501                 dev_err(u3d->dev, "%s, mv_u3d_req_to_trb fail\n", __func__);
502                 goto break_processing;
503         }
504
505         /* and push them to device queue */
506         ret = mv_u3d_queue_trb(ep, req);
507         if (ret)
508                 goto break_processing;
509
510         /* irq handler advances the queue */
511         list_add_tail(&req->queue, &ep->queue);
512
513         return 0;
514
515 break_processing:
516         ep->processing = 0;
517         return ret;
518 }
519
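/*
 * mv_u3d_ep_enable() - enable an endpoint according to its descriptor.
 * Clamps the burst size per transfer type, pulses EP_INIT and programs
 * the max packet size, burst size and endpoint type into the endpoint
 * control registers.
 */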
520 static int mv_u3d_ep_enable(struct usb_ep *_ep,
521                 const struct usb_endpoint_descriptor *desc)
522 {
523         struct mv_u3d *u3d;
524         struct mv_u3d_ep *ep;
525         u16 max = 0;
526         unsigned maxburst = 0;
527         u32 epxcr, direction;
528
529         if (!_ep || !desc || desc->bDescriptorType != USB_DT_ENDPOINT)
530                 return -EINVAL;
531
532         ep = container_of(_ep, struct mv_u3d_ep, ep);
533         u3d = ep->u3d;
534
535         if (!u3d->driver || u3d->gadget.speed == USB_SPEED_UNKNOWN)
536                 return -ESHUTDOWN;
537
538         direction = mv_u3d_ep_dir(ep);
539         max = le16_to_cpu(desc->wMaxPacketSize);
540
541         if (!_ep->maxburst)
542                 _ep->maxburst = 1;
543         maxburst = _ep->maxburst;
544
545         /* Set the max burst size */
546         switch (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK) {
547         case USB_ENDPOINT_XFER_BULK:
548                 if (maxburst > 16) {
549                         dev_dbg(u3d->dev,
550                                 "max burst should not be greater "
551                                 "than 16 on bulk ep\n");
552                         maxburst = 1;
553                         _ep->maxburst = maxburst;
554                 }
555                 dev_dbg(u3d->dev,
556                         "maxburst: %d on bulk %s\n", maxburst, ep->name);
557                 break;
558         case USB_ENDPOINT_XFER_CONTROL:
559                 /* control transfer only supports maxburst as one */
560                 maxburst = 1;
561                 _ep->maxburst = maxburst;
562                 break;
563         case USB_ENDPOINT_XFER_INT:
564                 if (maxburst != 1) {
565                         dev_dbg(u3d->dev,
566                                 "max burst should be 1 on int ep "
567                                 "if transfer size is not 1024\n");
568                         maxburst = 1;
569                         _ep->maxburst = maxburst;
570                 }
571                 break;
572         case USB_ENDPOINT_XFER_ISOC:
573                 if (maxburst != 1) {
574                         dev_dbg(u3d->dev,
575                                 "max burst should be 1 on isoc ep "
576                                 "if transfer size is not 1024\n");
577                         maxburst = 1;
578                         _ep->maxburst = maxburst;
579                 }
580                 break;
581         default:
582                 goto en_done;
583         }
584
585         ep->ep.maxpacket = max;
586         ep->ep.desc = desc;
587         ep->enabled = 1;
588
589         /* Enable the endpoint for Rx or Tx and set the endpoint type */
590         if (direction == MV_U3D_EP_DIR_OUT) {
591                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
592                 epxcr |= MV_U3D_EPXCR_EP_INIT;
593                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
594                 udelay(5);
595                 epxcr &= ~MV_U3D_EPXCR_EP_INIT;
596                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
597
598                 epxcr = ((max << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
599                       | ((maxburst - 1) << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
600                       | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
601                       | (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK));
602                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
603         } else {
604                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
605                 epxcr |= MV_U3D_EPXCR_EP_INIT;
606                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
607                 udelay(5);
608                 epxcr &= ~MV_U3D_EPXCR_EP_INIT;
609                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
610
611                 epxcr = ((max << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
612                       | ((maxburst - 1) << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
613                       | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
614                       | (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK));
615                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
616         }
617
618         return 0;
619 en_done:
620         return -EINVAL;
621 }
622
623 static int  mv_u3d_ep_disable(struct usb_ep *_ep)
624 {
625         struct mv_u3d *u3d;
626         struct mv_u3d_ep *ep;
627         u32 epxcr, direction;
628         unsigned long flags;
629
630         if (!_ep)
631                 return -EINVAL;
632
633         ep = container_of(_ep, struct mv_u3d_ep, ep);
634         if (!ep->ep.desc)
635                 return -EINVAL;
636
637         u3d = ep->u3d;
638
639         direction = mv_u3d_ep_dir(ep);
640
641         /* nuke all pending requests (does flush) */
642         spin_lock_irqsave(&u3d->lock, flags);
643         mv_u3d_nuke(ep, -ESHUTDOWN);
644         spin_unlock_irqrestore(&u3d->lock, flags);
645
646         /* Disable the endpoint for Rx or Tx and reset the endpoint type */
647         if (direction == MV_U3D_EP_DIR_OUT) {
648                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
649                 epxcr &= ~((1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
650                       | USB_ENDPOINT_XFERTYPE_MASK);
651                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
652         } else {
653                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
654                 epxcr &= ~((1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
655                       | USB_ENDPOINT_XFERTYPE_MASK);
656                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
657         }
658
659         ep->enabled = 0;
660
661         ep->ep.desc = NULL;
662         return 0;
663 }
664
665 static struct usb_request *
666 mv_u3d_alloc_request(struct usb_ep *_ep, gfp_t gfp_flags)
667 {
668         struct mv_u3d_req *req;
669
670         req = kzalloc(sizeof *req, gfp_flags);
671         if (!req)
672                 return NULL;
673
674         INIT_LIST_HEAD(&req->queue);
675
676         return &req->req;
677 }
678
679 static void mv_u3d_free_request(struct usb_ep *_ep, struct usb_request *_req)
680 {
681         struct mv_u3d_req *req = container_of(_req, struct mv_u3d_req, req);
682
683         kfree(req);
684 }
685
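/*
 * mv_u3d_ep_fifo_flush() - flush an endpoint FIFO.
 * For ep0 the flush bit is set and then cleared by software; for other
 * endpoints the flush bit is set and then polled until the hardware
 * clears it or the timeout expires.
 */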
686 static void mv_u3d_ep_fifo_flush(struct usb_ep *_ep)
687 {
688         struct mv_u3d *u3d;
689         u32 direction;
690         struct mv_u3d_ep *ep = container_of(_ep, struct mv_u3d_ep, ep);
691         unsigned int loops;
692         u32 tmp;
693
694         /* if endpoint is not enabled, cannot flush endpoint */
695         if (!ep->enabled)
696                 return;
697
698         u3d = ep->u3d;
699         direction = mv_u3d_ep_dir(ep);
700
701         /* ep0 needs the flush bit cleared by software after flushing the fifo. */
702         if (!ep->ep_num) {
703                 if (direction == MV_U3D_EP_DIR_OUT) {
704                         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
705                         tmp |= MV_U3D_EPXCR_EP_FLUSH;
706                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
707                         udelay(10);
708                         tmp &= ~MV_U3D_EPXCR_EP_FLUSH;
709                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
710                 } else {
711                         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
712                         tmp |= MV_U3D_EPXCR_EP_FLUSH;
713                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
714                         udelay(10);
715                         tmp &= ~MV_U3D_EPXCR_EP_FLUSH;
716                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
717                 }
718                 return;
719         }
720
721         if (direction == MV_U3D_EP_DIR_OUT) {
722                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
723                 tmp |= MV_U3D_EPXCR_EP_FLUSH;
724                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
725
726                 /* Wait until flushing completed */
727                 loops = LOOPS(MV_U3D_FLUSH_TIMEOUT);
728                 while (ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0) &
729                         MV_U3D_EPXCR_EP_FLUSH) {
730                         /*
731                          * EP_FLUSH bit should be cleared to indicate this
732                          * operation is complete
733                          */
734                         if (loops == 0) {
735                                 dev_dbg(u3d->dev,
736                                     "EP FLUSH TIMEOUT for ep%d%s\n", ep->ep_num,
737                                     direction ? "in" : "out");
738                                 return;
739                         }
740                         loops--;
741                         udelay(LOOPS_USEC);
742                 }
743         } else {        /* EP_DIR_IN */
744                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
745                 tmp |= MV_U3D_EPXCR_EP_FLUSH;
746                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
747
748                 /* Wait until flushing completed */
749                 loops = LOOPS(MV_U3D_FLUSH_TIMEOUT);
750                 while (ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0) &
751                         MV_U3D_EPXCR_EP_FLUSH) {
752                         /*
753                          * EP_FLUSH bit should be cleared to indicate this
754                          * operation is complete
755                          */
756                         if (loops == 0) {
757                                 dev_dbg(u3d->dev,
758                                     "EP FLUSH TIMEOUT for ep%d%s\n", ep->ep_num,
759                                     direction ? "in" : "out");
760                                 return;
761                         }
762                         loops--;
763                         udelay(LOOPS_USEC);
764                 }
765         }
766 }
767
768 /* queues (submits) an I/O request to an endpoint */
769 static int
770 mv_u3d_ep_queue(struct usb_ep *_ep, struct usb_request *_req, gfp_t gfp_flags)
771 {
772         struct mv_u3d_ep *ep;
773         struct mv_u3d_req *req;
774         struct mv_u3d *u3d;
775         unsigned long flags;
776         int is_first_req = 0;
777
778         if (unlikely(!_ep || !_req))
779                 return -EINVAL;
780
781         ep = container_of(_ep, struct mv_u3d_ep, ep);
782         u3d = ep->u3d;
783
784         req = container_of(_req, struct mv_u3d_req, req);
785
786         if (!ep->ep_num
787                 && u3d->ep0_state == MV_U3D_STATUS_STAGE
788                 && !_req->length) {
789                 dev_dbg(u3d->dev, "ep0 status stage\n");
790                 u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
791                 return 0;
792         }
793
794         dev_dbg(u3d->dev, "%s: %s, req: 0x%p\n",
795                         __func__, _ep->name, req);
796
797         /* catch various bogus parameters */
798         if (!req->req.complete || !req->req.buf
799                         || !list_empty(&req->queue)) {
800                 dev_err(u3d->dev,
801                         "%s, bad params, _req: 0x%p,"
802                         "req->req.complete: 0x%p, req->req.buf: 0x%p,"
803                         "list_empty: 0x%x\n",
804                         __func__, _req,
805                         req->req.complete, req->req.buf,
806                         list_empty(&req->queue));
807                 return -EINVAL;
808         }
809         if (unlikely(!ep->ep.desc)) {
810                 dev_err(u3d->dev, "%s, bad ep\n", __func__);
811                 return -EINVAL;
812         }
813         if (ep->ep.desc->bmAttributes == USB_ENDPOINT_XFER_ISOC) {
814                 if (req->req.length > ep->ep.maxpacket)
815                         return -EMSGSIZE;
816         }
817
818         if (!u3d->driver || u3d->gadget.speed == USB_SPEED_UNKNOWN) {
819                 dev_err(u3d->dev,
820                         "bad params of driver/speed\n");
821                 return -ESHUTDOWN;
822         }
823
824         req->ep = ep;
825
826         /* The request is first queued on the endpoint's software req_list. */
827         spin_lock_irqsave(&ep->req_lock, flags);
828         is_first_req = list_empty(&ep->req_list);
829         list_add_tail(&req->list, &ep->req_list);
830         spin_unlock_irqrestore(&ep->req_lock, flags);
831         if (!is_first_req) {
832                 dev_dbg(u3d->dev, "list is not empty\n");
833                 return 0;
834         }
835
836         dev_dbg(u3d->dev, "call mv_u3d_start_queue from usb_ep_queue\n");
837         spin_lock_irqsave(&u3d->lock, flags);
838         mv_u3d_start_queue(ep);
839         spin_unlock_irqrestore(&u3d->lock, flags);
840         return 0;
841 }
842
843 /* dequeues (cancels, unlinks) an I/O request from an endpoint */
844 static int mv_u3d_ep_dequeue(struct usb_ep *_ep, struct usb_request *_req)
845 {
846         struct mv_u3d_ep *ep;
847         struct mv_u3d_req *req = NULL, *iter;
848         struct mv_u3d *u3d;
849         struct mv_u3d_ep_context *ep_context;
850         struct mv_u3d_req *next_req;
851
852         unsigned long flags;
853         int ret = 0;
854
855         if (!_ep || !_req)
856                 return -EINVAL;
857
858         ep = container_of(_ep, struct mv_u3d_ep, ep);
859         u3d = ep->u3d;
860
861         spin_lock_irqsave(&ep->u3d->lock, flags);
862
863         /* make sure it's actually queued on this endpoint */
864         list_for_each_entry(iter, &ep->queue, queue) {
865                 if (&iter->req != _req)
866                         continue;
867                 req = iter;
868                 break;
869         }
870         if (!req) {
871                 ret = -EINVAL;
872                 goto out;
873         }
874
875         /* The request is in progress, or completed but not dequeued */
876         if (ep->queue.next == &req->queue) {
877                 _req->status = -ECONNRESET;
878                 mv_u3d_ep_fifo_flush(_ep);
879
880                 /* The request isn't the last request in this ep queue */
881                 if (req->queue.next != &ep->queue) {
882                         dev_dbg(u3d->dev,
883                                 "it is not the last request in this ep queue\n");
884                         ep_context = ep->ep_context;
885                         next_req = list_entry(req->queue.next,
886                                         struct mv_u3d_req, queue);
887
888                         /* Point the EP context at the first TRB of the next request. */
889                         iowrite32((unsigned long) next_req->trb_head,
890                                         &ep_context->trb_addr_lo);
891                 } else {
892                         struct mv_u3d_ep_context *ep_context;
893                         ep_context = ep->ep_context;
894                         ep_context->trb_addr_lo = 0;
895                         ep_context->trb_addr_hi = 0;
896                 }
897
898         } else
899                 WARN_ON(1);
900
901         mv_u3d_done(ep, req, -ECONNRESET);
902
903         /* remove the req from the ep req list */
904         if (!list_empty(&ep->req_list)) {
905                 struct mv_u3d_req *curr_req;
906                 curr_req = list_entry(ep->req_list.next,
907                                         struct mv_u3d_req, list);
908                 if (curr_req == req) {
909                         list_del_init(&req->list);
910                         ep->processing = 0;
911                 }
912         }
913
914 out:
915         spin_unlock_irqrestore(&ep->u3d->lock, flags);
916         return ret;
917 }
918
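/*
 * mv_u3d_ep_set_stall() - set or clear the HALT bit in the endpoint
 * control register for the given direction.
 */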
919 static void
920 mv_u3d_ep_set_stall(struct mv_u3d *u3d, u8 ep_num, u8 direction, int stall)
921 {
922         u32 tmp;
923         struct mv_u3d_ep *ep = u3d->eps;
924
925         dev_dbg(u3d->dev, "%s\n", __func__);
926         if (direction == MV_U3D_EP_DIR_OUT) {
927                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
928                 if (stall)
929                         tmp |= MV_U3D_EPXCR_EP_HALT;
930                 else
931                         tmp &= ~MV_U3D_EPXCR_EP_HALT;
932                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
933         } else {
934                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
935                 if (stall)
936                         tmp |= MV_U3D_EPXCR_EP_HALT;
937                 else
938                         tmp &= ~MV_U3D_EPXCR_EP_HALT;
939                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
940         }
941 }
942
943 static int mv_u3d_ep_set_halt_wedge(struct usb_ep *_ep, int halt, int wedge)
944 {
945         struct mv_u3d_ep *ep;
946         unsigned long flags;
947         int status = 0;
948         struct mv_u3d *u3d;
949
950         ep = container_of(_ep, struct mv_u3d_ep, ep);
951         u3d = ep->u3d;
952         if (!ep->ep.desc) {
953                 status = -EINVAL;
954                 goto out;
955         }
956
957         if (ep->ep.desc->bmAttributes == USB_ENDPOINT_XFER_ISOC) {
958                 status = -EOPNOTSUPP;
959                 goto out;
960         }
961
962         /*
963          * An attempt to halt an IN ep will fail if any transfer requests
964          * are still queued
965          */
966         if (halt && (mv_u3d_ep_dir(ep) == MV_U3D_EP_DIR_IN)
967                         && !list_empty(&ep->queue)) {
968                 status = -EAGAIN;
969                 goto out;
970         }
971
972         spin_lock_irqsave(&ep->u3d->lock, flags);
973         mv_u3d_ep_set_stall(u3d, ep->ep_num, mv_u3d_ep_dir(ep), halt);
974         if (halt && wedge)
975                 ep->wedge = 1;
976         else if (!halt)
977                 ep->wedge = 0;
978         spin_unlock_irqrestore(&ep->u3d->lock, flags);
979
980         if (ep->ep_num == 0)
981                 u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
982 out:
983         return status;
984 }
985
986 static int mv_u3d_ep_set_halt(struct usb_ep *_ep, int halt)
987 {
988         return mv_u3d_ep_set_halt_wedge(_ep, halt, 0);
989 }
990
991 static int mv_u3d_ep_set_wedge(struct usb_ep *_ep)
992 {
993         return mv_u3d_ep_set_halt_wedge(_ep, 1, 1);
994 }
995
996 static const struct usb_ep_ops mv_u3d_ep_ops = {
997         .enable         = mv_u3d_ep_enable,
998         .disable        = mv_u3d_ep_disable,
999
1000         .alloc_request  = mv_u3d_alloc_request,
1001         .free_request   = mv_u3d_free_request,
1002
1003         .queue          = mv_u3d_ep_queue,
1004         .dequeue        = mv_u3d_ep_dequeue,
1005
1006         .set_wedge      = mv_u3d_ep_set_wedge,
1007         .set_halt       = mv_u3d_ep_set_halt,
1008         .fifo_flush     = mv_u3d_ep_fifo_flush,
1009 };
1010
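/*
 * mv_u3d_controller_stop() - quiesce the controller: mask interrupts
 * (keeping only VBUS-valid detection when the clock is not gated),
 * acknowledge pending status bits and clear the RUN bit.
 */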
1011 static void mv_u3d_controller_stop(struct mv_u3d *u3d)
1012 {
1013         u32 tmp;
1014
1015         if (!u3d->clock_gating && u3d->vbus_valid_detect)
1016                 iowrite32(MV_U3D_INTR_ENABLE_VBUS_VALID,
1017                                 &u3d->vuc_regs->intrenable);
1018         else
1019                 iowrite32(0, &u3d->vuc_regs->intrenable);
1020         iowrite32(~0x0, &u3d->vuc_regs->endcomplete);
1021         iowrite32(~0x0, &u3d->vuc_regs->trbunderrun);
1022         iowrite32(~0x0, &u3d->vuc_regs->trbcomplete);
1023         iowrite32(~0x0, &u3d->vuc_regs->linkchange);
1024         iowrite32(0x1, &u3d->vuc_regs->setuplock);
1025
1026         /* Reset the RUN bit in the command register to stop USB */
1027         tmp = ioread32(&u3d->op_regs->usbcmd);
1028         tmp &= ~MV_U3D_CMD_RUN_STOP;
1029         iowrite32(tmp, &u3d->op_regs->usbcmd);
1030         dev_dbg(u3d->dev, "after u3d_stop, USBCMD 0x%x\n",
1031                 ioread32(&u3d->op_regs->usbcmd));
1032 }
1033
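/*
 * mv_u3d_controller_start() - bring the controller up: enable the LTSSM,
 * unmask the interrupts used by the driver, enable the control endpoint
 * and set the RUN bit.
 */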
1034 static void mv_u3d_controller_start(struct mv_u3d *u3d)
1035 {
1036         u32 usbintr;
1037         u32 temp;
1038
1039         /* enable link LTSSM state machine */
1040         temp = ioread32(&u3d->vuc_regs->ltssm);
1041         temp |= MV_U3D_LTSSM_PHY_INIT_DONE;
1042         iowrite32(temp, &u3d->vuc_regs->ltssm);
1043
1044         /* Enable interrupts */
1045         usbintr = MV_U3D_INTR_ENABLE_LINK_CHG | MV_U3D_INTR_ENABLE_TXDESC_ERR |
1046                 MV_U3D_INTR_ENABLE_RXDESC_ERR | MV_U3D_INTR_ENABLE_TX_COMPLETE |
1047                 MV_U3D_INTR_ENABLE_RX_COMPLETE | MV_U3D_INTR_ENABLE_SETUP |
1048                 (u3d->vbus_valid_detect ? MV_U3D_INTR_ENABLE_VBUS_VALID : 0);
1049         iowrite32(usbintr, &u3d->vuc_regs->intrenable);
1050
1051         /* Enable ctrl ep */
1052         iowrite32(0x1, &u3d->vuc_regs->ctrlepenable);
1053
1054         /* Set the Run bit in the command register */
1055         iowrite32(MV_U3D_CMD_RUN_STOP, &u3d->op_regs->usbcmd);
1056         dev_dbg(u3d->dev, "after u3d_start, USBCMD 0x%x\n",
1057                 ioread32(&u3d->op_regs->usbcmd));
1058 }
1059
1060 static int mv_u3d_controller_reset(struct mv_u3d *u3d)
1061 {
1062         unsigned int loops;
1063         u32 tmp;
1064
1065         /* Stop the controller */
1066         tmp = ioread32(&u3d->op_regs->usbcmd);
1067         tmp &= ~MV_U3D_CMD_RUN_STOP;
1068         iowrite32(tmp, &u3d->op_regs->usbcmd);
1069
1070         /* Reset the controller to get default values */
1071         iowrite32(MV_U3D_CMD_CTRL_RESET, &u3d->op_regs->usbcmd);
1072
1073         /* wait for reset to complete */
1074         loops = LOOPS(MV_U3D_RESET_TIMEOUT);
1075         while (ioread32(&u3d->op_regs->usbcmd) & MV_U3D_CMD_CTRL_RESET) {
1076                 if (loops == 0) {
1077                         dev_err(u3d->dev,
1078                                 "Wait for RESET completed TIMEOUT\n");
1079                         return -ETIMEDOUT;
1080                 }
1081                 loops--;
1082                 udelay(LOOPS_USEC);
1083         }
1084
1085         /* Configure the Endpoint Context Address */
1086         iowrite32(u3d->ep_context_dma, &u3d->op_regs->dcbaapl);
1087         iowrite32(0, &u3d->op_regs->dcbaaph);
1088
1089         return 0;
1090 }
1091
1092 static int mv_u3d_enable(struct mv_u3d *u3d)
1093 {
1094         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1095         int retval;
1096
1097         if (u3d->active)
1098                 return 0;
1099
1100         if (!u3d->clock_gating) {
1101                 u3d->active = 1;
1102                 return 0;
1103         }
1104
1105         dev_dbg(u3d->dev, "enable u3d\n");
1106         clk_enable(u3d->clk);
1107         if (pdata->phy_init) {
1108                 retval = pdata->phy_init(u3d->phy_regs);
1109                 if (retval) {
1110                         dev_err(u3d->dev,
1111                                 "init phy error %d\n", retval);
1112                         clk_disable(u3d->clk);
1113                         return retval;
1114                 }
1115         }
1116         u3d->active = 1;
1117
1118         return 0;
1119 }
1120
1121 static void mv_u3d_disable(struct mv_u3d *u3d)
1122 {
1123         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1124         if (u3d->clock_gating && u3d->active) {
1125                 dev_dbg(u3d->dev, "disable u3d\n");
1126                 if (pdata->phy_deinit)
1127                         pdata->phy_deinit(u3d->phy_regs);
1128                 clk_disable(u3d->clk);
1129                 u3d->active = 0;
1130         }
1131 }
1132
1133 static int mv_u3d_vbus_session(struct usb_gadget *gadget, int is_active)
1134 {
1135         struct mv_u3d *u3d;
1136         unsigned long flags;
1137         int retval = 0;
1138
1139         u3d = container_of(gadget, struct mv_u3d, gadget);
1140
1141         spin_lock_irqsave(&u3d->lock, flags);
1142
1143         u3d->vbus_active = (is_active != 0);
1144         dev_dbg(u3d->dev, "%s: softconnect %d, vbus_active %d\n",
1145                 __func__, u3d->softconnect, u3d->vbus_active);
1146         /*
1147          * 1. external VBUS detect: we can disable/enable clock on demand.
1148          * 2. UDC VBUS detect: we have to enable clock all the time.
1149          * 3. No VBUS detect: we have to enable clock all the time.
1150          */
1151         if (u3d->driver && u3d->softconnect && u3d->vbus_active) {
1152                 retval = mv_u3d_enable(u3d);
1153                 if (retval == 0) {
1154                         /*
1155                          * after the clock is disabled, we lose all register
1156                          * context. We have to re-init the registers.
1157                          */
1158                         mv_u3d_controller_reset(u3d);
1159                         mv_u3d_ep0_reset(u3d);
1160                         mv_u3d_controller_start(u3d);
1161                 }
1162         } else if (u3d->driver && u3d->softconnect) {
1163                 if (!u3d->active)
1164                         goto out;
1165
1166                 /* stop all the transfers in the queue */
1167                 mv_u3d_stop_activity(u3d, u3d->driver);
1168                 mv_u3d_controller_stop(u3d);
1169                 mv_u3d_disable(u3d);
1170         }
1171
1172 out:
1173         spin_unlock_irqrestore(&u3d->lock, flags);
1174         return retval;
1175 }
1176
1177 /* constrain controller's VBUS power usage
1178  * This call is used by gadget drivers during SET_CONFIGURATION calls,
1179  * reporting how much power the device may consume.  For example, this
1180  * could affect how quickly batteries are recharged.
1181  *
1182  * Returns zero on success, else negative errno.
1183  */
1184 static int mv_u3d_vbus_draw(struct usb_gadget *gadget, unsigned mA)
1185 {
1186         struct mv_u3d *u3d = container_of(gadget, struct mv_u3d, gadget);
1187
1188         u3d->power = mA;
1189
1190         return 0;
1191 }
1192
1193 static int mv_u3d_pullup(struct usb_gadget *gadget, int is_on)
1194 {
1195         struct mv_u3d *u3d = container_of(gadget, struct mv_u3d, gadget);
1196         unsigned long flags;
1197         int retval = 0;
1198
1199         spin_lock_irqsave(&u3d->lock, flags);
1200
1201         dev_dbg(u3d->dev, "%s: softconnect %d, vbus_active %d\n",
1202                 __func__, u3d->softconnect, u3d->vbus_active);
1203         u3d->softconnect = (is_on != 0);
1204         if (u3d->driver && u3d->softconnect && u3d->vbus_active) {
1205                 retval = mv_u3d_enable(u3d);
1206                 if (retval == 0) {
1207                         /*
1208                          * after the clock is disabled, we lose all register
1209                          * context. We have to re-init the registers.
1210                          */
1211                         mv_u3d_controller_reset(u3d);
1212                         mv_u3d_ep0_reset(u3d);
1213                         mv_u3d_controller_start(u3d);
1214                 }
1215         } else if (u3d->driver && u3d->vbus_active) {
1216                 /* stop all the transfers in the queue */
1217                 mv_u3d_stop_activity(u3d, u3d->driver);
1218                 mv_u3d_controller_stop(u3d);
1219                 mv_u3d_disable(u3d);
1220         }
1221
1222         spin_unlock_irqrestore(&u3d->lock, flags);
1223
1224         return retval;
1225 }
1226
1227 static int mv_u3d_start(struct usb_gadget *g,
1228                 struct usb_gadget_driver *driver)
1229 {
1230         struct mv_u3d *u3d = container_of(g, struct mv_u3d, gadget);
1231         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1232         unsigned long flags;
1233
1234         if (u3d->driver)
1235                 return -EBUSY;
1236
1237         spin_lock_irqsave(&u3d->lock, flags);
1238
1239         if (!u3d->clock_gating) {
1240                 clk_enable(u3d->clk);
1241                 if (pdata->phy_init)
1242                         pdata->phy_init(u3d->phy_regs);
1243         }
1244
1245         /* hook up the driver ... */
1246         u3d->driver = driver;
1247
1248         u3d->ep0_dir = USB_DIR_OUT;
1249
1250         spin_unlock_irqrestore(&u3d->lock, flags);
1251
1252         u3d->vbus_valid_detect = 1;
1253
1254         return 0;
1255 }
1256
1257 static int mv_u3d_stop(struct usb_gadget *g)
1258 {
1259         struct mv_u3d *u3d = container_of(g, struct mv_u3d, gadget);
1260         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1261         unsigned long flags;
1262
1263         u3d->vbus_valid_detect = 0;
1264         spin_lock_irqsave(&u3d->lock, flags);
1265
1266         /* enable clock to access controller register */
1267         clk_enable(u3d->clk);
1268         if (pdata->phy_init)
1269                 pdata->phy_init(u3d->phy_regs);
1270
1271         mv_u3d_controller_stop(u3d);
1272         /* stop all usb activities */
1273         u3d->gadget.speed = USB_SPEED_UNKNOWN;
1274         mv_u3d_stop_activity(u3d, NULL);
1275         mv_u3d_disable(u3d);
1276
1277         if (pdata->phy_deinit)
1278                 pdata->phy_deinit(u3d->phy_regs);
1279         clk_disable(u3d->clk);
1280
1281         spin_unlock_irqrestore(&u3d->lock, flags);
1282
1283         u3d->driver = NULL;
1284
1285         return 0;
1286 }
1287
1288 /* device controller usb_gadget_ops structure */
1289 static const struct usb_gadget_ops mv_u3d_ops = {
1290         /* notify controller that VBUS is powered or not */
1291         .vbus_session   = mv_u3d_vbus_session,
1292
1293         /* constrain controller's VBUS power usage */
1294         .vbus_draw      = mv_u3d_vbus_draw,
1295
1296         .pullup         = mv_u3d_pullup,
1297         .udc_start      = mv_u3d_start,
1298         .udc_stop       = mv_u3d_stop,
1299 };
1300
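/*
 * mv_u3d_eps_init() - initialize the endpoint array.
 * ep0 uses eps[1] and ep_context[1]; the remaining endpoints are set up
 * in IN/OUT pairs, linked to their endpoint contexts and added to the
 * gadget's ep_list.
 */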
1301 static int mv_u3d_eps_init(struct mv_u3d *u3d)
1302 {
1303         struct mv_u3d_ep        *ep;
1304         char name[14];
1305         int i;
1306
1307         /* initialize ep0, ep0 in/out use eps[1] */
1308         ep = &u3d->eps[1];
1309         ep->u3d = u3d;
1310         strncpy(ep->name, "ep0", sizeof(ep->name));
1311         ep->ep.name = ep->name;
1312         ep->ep.ops = &mv_u3d_ep_ops;
1313         ep->wedge = 0;
1314         usb_ep_set_maxpacket_limit(&ep->ep, MV_U3D_EP0_MAX_PKT_SIZE);
1315         ep->ep.caps.type_control = true;
1316         ep->ep.caps.dir_in = true;
1317         ep->ep.caps.dir_out = true;
1318         ep->ep_num = 0;
1319         ep->ep.desc = &mv_u3d_ep0_desc;
1320         INIT_LIST_HEAD(&ep->queue);
1321         INIT_LIST_HEAD(&ep->req_list);
1322         ep->ep_type = USB_ENDPOINT_XFER_CONTROL;
1323
1324         /* add ep0 ep_context */
1325         ep->ep_context = &u3d->ep_context[1];
1326
1327         /* initialize other endpoints */
1328         for (i = 2; i < u3d->max_eps * 2; i++) {
1329                 ep = &u3d->eps[i];
1330                 if (i & 1) {
1331                         snprintf(name, sizeof(name), "ep%din", i >> 1);
1332                         ep->direction = MV_U3D_EP_DIR_IN;
1333                         ep->ep.caps.dir_in = true;
1334                 } else {
1335                         snprintf(name, sizeof(name), "ep%dout", i >> 1);
1336                         ep->direction = MV_U3D_EP_DIR_OUT;
1337                         ep->ep.caps.dir_out = true;
1338                 }
1339                 ep->u3d = u3d;
1340                 strncpy(ep->name, name, sizeof(ep->name));
1341                 ep->ep.name = ep->name;
1342
1343                 ep->ep.caps.type_iso = true;
1344                 ep->ep.caps.type_bulk = true;
1345                 ep->ep.caps.type_int = true;
1346
1347                 ep->ep.ops = &mv_u3d_ep_ops;
1348                 usb_ep_set_maxpacket_limit(&ep->ep, (unsigned short) ~0);
1349                 ep->ep_num = i / 2;
1350
1351                 INIT_LIST_HEAD(&ep->queue);
1352                 list_add_tail(&ep->ep.ep_list, &u3d->gadget.ep_list);
1353
1354                 INIT_LIST_HEAD(&ep->req_list);
1355                 spin_lock_init(&ep->req_lock);
1356                 ep->ep_context = &u3d->ep_context[i];
1357         }
1358
1359         return 0;
1360 }
1361
1362 /* delete all endpoint requests, called with spinlock held */
1363 static void mv_u3d_nuke(struct mv_u3d_ep *ep, int status)
1364 {
1365         /* endpoint fifo flush */
1366         mv_u3d_ep_fifo_flush(&ep->ep);
1367
1368         while (!list_empty(&ep->queue)) {
1369                 struct mv_u3d_req *req = NULL;
1370                 req = list_entry(ep->queue.next, struct mv_u3d_req, queue);
1371                 mv_u3d_done(ep, req, status);
1372         }
1373 }
1374
1375 /* stop all USB activities */
1376 static
1377 void mv_u3d_stop_activity(struct mv_u3d *u3d, struct usb_gadget_driver *driver)
1378 {
1379         struct mv_u3d_ep        *ep;
1380
1381         mv_u3d_nuke(&u3d->eps[1], -ESHUTDOWN);
1382
1383         list_for_each_entry(ep, &u3d->gadget.ep_list, ep.ep_list) {
1384                 mv_u3d_nuke(ep, -ESHUTDOWN);
1385         }
1386
1387         /* report disconnect; the driver is already quiesced */
1388         if (driver) {
1389                 spin_unlock(&u3d->lock);
1390                 driver->disconnect(&u3d->gadget);
1391                 spin_lock(&u3d->lock);
1392         }
1393 }
1394
1395 static void mv_u3d_irq_process_error(struct mv_u3d *u3d)
1396 {
1397         /* Increment the error count */
1398         u3d->errors++;
1399         dev_err(u3d->dev, "%s\n", __func__);
1400 }
1401
1402 static void mv_u3d_irq_process_link_change(struct mv_u3d *u3d)
1403 {
1404         u32 linkchange;
1405
1406         linkchange = ioread32(&u3d->vuc_regs->linkchange);
1407         iowrite32(linkchange, &u3d->vuc_regs->linkchange);
1408
1409         dev_dbg(u3d->dev, "linkchange: 0x%x\n", linkchange);
1410
1411         if (linkchange & MV_U3D_LINK_CHANGE_LINK_UP) {
1412                 dev_dbg(u3d->dev, "link up: ltssm state: 0x%x\n",
1413                         ioread32(&u3d->vuc_regs->ltssmstate));
1414
1415                 u3d->usb_state = USB_STATE_DEFAULT;
1416                 u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
1417                 u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
1418
1419                 /* set speed */
1420                 u3d->gadget.speed = USB_SPEED_SUPER;
1421         }
1422
1423         if (linkchange & MV_U3D_LINK_CHANGE_SUSPEND) {
1424                 dev_dbg(u3d->dev, "link suspend\n");
1425                 u3d->resume_state = u3d->usb_state;
1426                 u3d->usb_state = USB_STATE_SUSPENDED;
1427         }
1428
1429         if (linkchange & MV_U3D_LINK_CHANGE_RESUME) {
1430                 dev_dbg(u3d->dev, "link resume\n");
1431                 u3d->usb_state = u3d->resume_state;
1432                 u3d->resume_state = 0;
1433         }
1434
1435         if (linkchange & MV_U3D_LINK_CHANGE_WRESET) {
1436                 dev_dbg(u3d->dev, "warm reset\n");
1437                 u3d->usb_state = USB_STATE_POWERED;
1438         }
1439
1440         if (linkchange & MV_U3D_LINK_CHANGE_HRESET) {
1441                 dev_dbg(u3d->dev, "hot reset\n");
1442                 u3d->usb_state = USB_STATE_DEFAULT;
1443         }
1444
1445         if (linkchange & MV_U3D_LINK_CHANGE_INACT)
1446                 dev_dbg(u3d->dev, "inactive\n");
1447
1448         if (linkchange & MV_U3D_LINK_CHANGE_DISABLE_AFTER_U0)
1449                 dev_dbg(u3d->dev, "ss.disabled\n");
1450
1451         if (linkchange & MV_U3D_LINK_CHANGE_VBUS_INVALID) {
1452                 dev_dbg(u3d->dev, "vbus invalid\n");
1453                 u3d->usb_state = USB_STATE_ATTACHED;
1454                 u3d->vbus_valid_detect = 1;
1455                 /* if external vbus detect is not supported,
1456                  * we handle it here.
1457                  */
1458                 if (!u3d->vbus) {
1459                         spin_unlock(&u3d->lock);
1460                         mv_u3d_vbus_session(&u3d->gadget, 0);
1461                         spin_lock(&u3d->lock);
1462                 }
1463         }
1464 }
1465
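/*
 * mv_u3d_ch9setaddress() - handle the standard SET_ADDRESS request in the
 * driver: validate the device state and address range, then program the
 * new address into the device address register.
 */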
1466 static void mv_u3d_ch9setaddress(struct mv_u3d *u3d,
1467                                 struct usb_ctrlrequest *setup)
1468 {
1469         u32 tmp;
1470
1471         if (u3d->usb_state != USB_STATE_DEFAULT) {
1472                 dev_err(u3d->dev,
1473                         "%s, cannot setaddr in this state (%d)\n",
1474                         __func__, u3d->usb_state);
1475                 goto err;
1476         }
1477
1478         u3d->dev_addr = (u8)setup->wValue;
1479
1480         dev_dbg(u3d->dev, "%s: 0x%x\n", __func__, u3d->dev_addr);
1481
1482         if (u3d->dev_addr > 127) {
1483                 dev_err(u3d->dev,
1484                         "%s, u3d address is wrong (out of range)\n", __func__);
1485                 u3d->dev_addr = 0;
1486                 goto err;
1487         }
1488
1489         /* update usb state */
1490         u3d->usb_state = USB_STATE_ADDRESS;
1491
1492         /* set the new address */
1493         tmp = ioread32(&u3d->vuc_regs->devaddrtiebrkr);
1494         tmp &= ~0x7F;
1495         tmp |= (u32)u3d->dev_addr;
1496         iowrite32(tmp, &u3d->vuc_regs->devaddrtiebrkr);
1497
1498         return;
1499 err:
1500         mv_u3d_ep0_stall(u3d);
1501 }
1502
1503 static int mv_u3d_is_set_configuration(struct usb_ctrlrequest *setup)
1504 {
1505         if ((setup->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD)
1506                 if (setup->bRequest == USB_REQ_SET_CONFIGURATION)
1507                         return 1;
1508
1509         return 0;
1510 }
1511
1512 static void mv_u3d_handle_setup_packet(struct mv_u3d *u3d, u8 ep_num,
1513         struct usb_ctrlrequest *setup)
1514         __releases(&u3d->lock)
1515         __acquires(&u3d->lock)
1516 {
1517         bool delegate = false;
1518
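        /* a new SETUP aborts any request still queued on this endpoint's IN side */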
1519         mv_u3d_nuke(&u3d->eps[ep_num * 2 + MV_U3D_EP_DIR_IN], -ESHUTDOWN);
1520
1521         dev_dbg(u3d->dev, "SETUP %02x.%02x v%04x i%04x l%04x\n",
1522                         setup->bRequestType, setup->bRequest,
1523                         setup->wValue, setup->wIndex, setup->wLength);
1524
1525         /* We process some standard setup requests here */
1526         if ((setup->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD) {
1527                 switch (setup->bRequest) {
1528                 case USB_REQ_GET_STATUS:
1529                         delegate = true;
1530                         break;
1531
1532                 case USB_REQ_SET_ADDRESS:
1533                         mv_u3d_ch9setaddress(u3d, setup);
1534                         break;
1535
1536                 case USB_REQ_CLEAR_FEATURE:
1537                         delegate = true;
1538                         break;
1539
1540                 case USB_REQ_SET_FEATURE:
1541                         delegate = true;
1542                         break;
1543
1544                 default:
1545                         delegate = true;
1546                 }
1547         } else
1548                 delegate = true;
1549
1550         /* delegate USB standard requests to the gadget driver */
1551         if (delegate) {
1552                 /* USB requests handled by gadget */
1553                 if (setup->wLength) {
1554                         /* DATA phase from gadget, STATUS phase from u3d */
1555                         u3d->ep0_dir = (setup->bRequestType & USB_DIR_IN)
1556                                         ? MV_U3D_EP_DIR_IN : MV_U3D_EP_DIR_OUT;
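                        /* drop the lock: the gadget's setup callback may call back into the UDC */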
1557                         spin_unlock(&u3d->lock);
1558                         if (u3d->driver->setup(&u3d->gadget,
1559                                 &u3d->local_setup_buff) < 0) {
1560                                 dev_err(u3d->dev, "setup error!\n");
1561                                 mv_u3d_ep0_stall(u3d);
1562                         }
1563                         spin_lock(&u3d->lock);
1564                 } else {
1565                         /* no DATA phase, STATUS phase from gadget */
1566                         u3d->ep0_dir = MV_U3D_EP_DIR_IN;
1567                         u3d->ep0_state = MV_U3D_STATUS_STAGE;
1568                         spin_unlock(&u3d->lock);
1569                         if (u3d->driver->setup(&u3d->gadget,
1570                                 &u3d->local_setup_buff) < 0)
1571                                 mv_u3d_ep0_stall(u3d);
1572                         spin_lock(&u3d->lock);
1573                 }
1574
1575                 if (mv_u3d_is_set_configuration(setup)) {
1576                         dev_dbg(u3d->dev, "u3d configured\n");
1577                         u3d->usb_state = USB_STATE_CONFIGURED;
1578                 }
1579         }
1580 }
1581
1582 static void mv_u3d_get_setup_data(struct mv_u3d *u3d, u8 ep_num, u8 *buffer_ptr)
1583 {
1584         struct mv_u3d_ep_context *epcontext;
1585
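        /* the SETUP packet is stored in the IN-direction ep context's setup buffer */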
1586         epcontext = &u3d->ep_context[ep_num * 2 + MV_U3D_EP_DIR_IN];
1587
1588         /* Copy the setup packet to local buffer */
1589         memcpy(buffer_ptr, (u8 *) &epcontext->setup_buffer, 8);
1590 }
1591
1592 static void mv_u3d_irq_process_setup(struct mv_u3d *u3d)
1593 {
1594         u32 tmp, i;
1595         /* Process all Setup packet received interrupts */
1596         tmp = ioread32(&u3d->vuc_regs->setuplock);
1597         if (tmp) {
1598                 for (i = 0; i < u3d->max_eps; i++) {
1599                         if (tmp & (1 << i)) {
1600                                 mv_u3d_get_setup_data(u3d, i,
1601                                         (u8 *)(&u3d->local_setup_buff));
1602                                 mv_u3d_handle_setup_packet(u3d, i,
1603                                         &u3d->local_setup_buff);
1604                         }
1605                 }
1606         }
1607
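        /* write the same bits back to clear the handled setup-lock status */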
1608         iowrite32(tmp, &u3d->vuc_regs->setuplock);
1609 }
1610
1611 static void mv_u3d_irq_process_tr_complete(struct mv_u3d *u3d)
1612 {
1613         u32 tmp, bit_pos;
1614         int i, ep_num = 0, direction = 0;
1615         struct mv_u3d_ep        *curr_ep;
1616         struct mv_u3d_req *curr_req, *temp_req;
1617         int status;
1618
1619         tmp = ioread32(&u3d->vuc_regs->endcomplete);
1620
1621         dev_dbg(u3d->dev, "tr_complete: ep: 0x%x\n", tmp);
1622         if (!tmp)
1623                 return;
1624         iowrite32(tmp, &u3d->vuc_regs->endcomplete);
1625
1626         for (i = 0; i < u3d->max_eps * 2; i++) {
1627                 ep_num = i >> 1;
1628                 direction = i % 2;
1629
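                /* OUT completions use the low 16 bits, IN completions the high 16 bits */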
1630                 bit_pos = 1 << (ep_num + 16 * direction);
1631
1632                 if (!(bit_pos & tmp))
1633                         continue;
1634
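                /* ep0 completions (either direction) are handled through eps[1] */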
1635                 if (i == 0)
1636                         curr_ep = &u3d->eps[1];
1637                 else
1638                         curr_ep = &u3d->eps[i];
1639
1640                 /* remove req out of ep request list after completion */
1641                 dev_dbg(u3d->dev, "tr comp: check req_list\n");
1642                 spin_lock(&curr_ep->req_lock);
1643                 if (!list_empty(&curr_ep->req_list)) {
1644                         struct mv_u3d_req *req;
1645                         req = list_entry(curr_ep->req_list.next,
1646                                                 struct mv_u3d_req, list);
1647                         list_del_init(&req->list);
1648                         curr_ep->processing = 0;
1649                 }
1650                 spin_unlock(&curr_ep->req_lock);
1651
1652                 /* process the req queue until an incomplete request */
1653                 list_for_each_entry_safe(curr_req, temp_req,
1654                         &curr_ep->queue, queue) {
1655                         status = mv_u3d_process_ep_req(u3d, i, curr_req);
1656                         if (status)
1657                                 break;
1658                         /* write back status to req */
1659                         curr_req->req.status = status;
1660
1661                         /* ep0 request completion */
1662                         if (ep_num == 0) {
1663                                 mv_u3d_done(curr_ep, curr_req, 0);
1664                                 break;
1665                         } else {
1666                                 mv_u3d_done(curr_ep, curr_req, status);
1667                         }
1668                 }
1669
1670                 dev_dbg(u3d->dev, "call mv_u3d_start_queue from ep complete\n");
1671                 mv_u3d_start_queue(curr_ep);
1672         }
1673 }
1674
1675 static irqreturn_t mv_u3d_irq(int irq, void *dev)
1676 {
1677         struct mv_u3d *u3d = (struct mv_u3d *)dev;
1678         u32 status, intr;
1679         u32 bridgesetting;
1680         u32 trbunderrun;
1681
1682         spin_lock(&u3d->lock);
1683
1684         status = ioread32(&u3d->vuc_regs->intrcause);
1685         intr = ioread32(&u3d->vuc_regs->intrenable);
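        /* only handle interrupt causes that are actually enabled */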
1686         status &= intr;
1687
1688         if (status == 0) {
1689                 spin_unlock(&u3d->lock);
1690                 dev_err(u3d->dev, "irq error!\n");
1691                 return IRQ_NONE;
1692         }
1693
1694         if (status & MV_U3D_USBINT_VBUS_VALID) {
1695                 bridgesetting = ioread32(&u3d->vuc_regs->bridgesetting);
1696                 if (bridgesetting & MV_U3D_BRIDGE_SETTING_VBUS_VALID) {
1697                         /* write vbus valid bit of bridge setting to clear */
1698                         bridgesetting = MV_U3D_BRIDGE_SETTING_VBUS_VALID;
1699                         iowrite32(bridgesetting, &u3d->vuc_regs->bridgesetting);
1700                         dev_dbg(u3d->dev, "vbus valid\n");
1701
1702                         u3d->usb_state = USB_STATE_POWERED;
1703                         u3d->vbus_valid_detect = 0;
1704                         /* if external vbus detect is not supported,
1705                          * we handle it here.
1706                          */
1707                         if (!u3d->vbus) {
1708                                 spin_unlock(&u3d->lock);
1709                                 mv_u3d_vbus_session(&u3d->gadget, 1);
1710                                 spin_lock(&u3d->lock);
1711                         }
1712                 } else
1713                         dev_err(u3d->dev, "vbus bit is not set\n");
1714         }
1715
1716         /* RX data is already in the 16KB FIFO.*/
1717         if (status & MV_U3D_USBINT_UNDER_RUN) {
1718                 trbunderrun = ioread32(&u3d->vuc_regs->trbunderrun);
1719                 dev_err(u3d->dev, "under run, ep%d\n", trbunderrun);
1720                 iowrite32(trbunderrun, &u3d->vuc_regs->trbunderrun);
1721                 mv_u3d_irq_process_error(u3d);
1722         }
1723
1724         if (status & (MV_U3D_USBINT_RXDESC_ERR | MV_U3D_USBINT_TXDESC_ERR)) {
1725                 /* write one to clear */
1726                 iowrite32(status & (MV_U3D_USBINT_RXDESC_ERR
1727                         | MV_U3D_USBINT_TXDESC_ERR),
1728                         &u3d->vuc_regs->intrcause);
1729                 dev_err(u3d->dev, "desc err 0x%x\n", status);
1730                 mv_u3d_irq_process_error(u3d);
1731         }
1732
1733         if (status & MV_U3D_USBINT_LINK_CHG)
1734                 mv_u3d_irq_process_link_change(u3d);
1735
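        /* TX and RX transfer completions share the same handler */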
1736         if (status & MV_U3D_USBINT_TX_COMPLETE)
1737                 mv_u3d_irq_process_tr_complete(u3d);
1738
1739         if (status & MV_U3D_USBINT_RX_COMPLETE)
1740                 mv_u3d_irq_process_tr_complete(u3d);
1741
1742         if (status & MV_U3D_USBINT_SETUP)
1743                 mv_u3d_irq_process_setup(u3d);
1744
1745         spin_unlock(&u3d->lock);
1746         return IRQ_HANDLED;
1747 }
1748
1749 static void mv_u3d_remove(struct platform_device *dev)
1750 {
1751         struct mv_u3d *u3d = platform_get_drvdata(dev);
1752
1753         BUG_ON(u3d == NULL);
1754
1755         usb_del_gadget_udc(&u3d->gadget);
1756
1757         /* free memory allocated in probe */
1758         dma_pool_destroy(u3d->trb_pool);
1759
1760         if (u3d->ep_context)
1761                 dma_free_coherent(&dev->dev, u3d->ep_context_size,
1762                         u3d->ep_context, u3d->ep_context_dma);
1763
1764         kfree(u3d->eps);
1765
1766         if (u3d->irq)
1767                 free_irq(u3d->irq, u3d);
1768
1769         if (u3d->cap_regs)
1770                 iounmap(u3d->cap_regs);
1771         u3d->cap_regs = NULL;
1772
1773         kfree(u3d->status_req);
1774
1775         clk_put(u3d->clk);
1776
1777         kfree(u3d);
1778 }
1779
1780 static int mv_u3d_probe(struct platform_device *dev)
1781 {
1782         struct mv_u3d *u3d;
1783         struct mv_usb_platform_data *pdata = dev_get_platdata(&dev->dev);
1784         int retval = 0;
1785         struct resource *r;
1786         size_t size;
1787
1788         if (!dev_get_platdata(&dev->dev)) {
1789                 dev_err(&dev->dev, "missing platform_data\n");
1790                 retval = -ENODEV;
1791                 goto err_pdata;
1792         }
1793
1794         u3d = kzalloc(sizeof(*u3d), GFP_KERNEL);
1795         if (!u3d) {
1796                 retval = -ENOMEM;
1797                 goto err_alloc_private;
1798         }
1799
1800         spin_lock_init(&u3d->lock);
1801
1802         platform_set_drvdata(dev, u3d);
1803
1804         u3d->dev = &dev->dev;
1805         u3d->vbus = pdata->vbus;
1806
1807         u3d->clk = clk_get(&dev->dev, NULL);
1808         if (IS_ERR(u3d->clk)) {
1809                 retval = PTR_ERR(u3d->clk);
1810                 goto err_get_clk;
1811         }
1812
1813         r = platform_get_resource_byname(dev, IORESOURCE_MEM, "capregs");
1814         if (!r) {
1815                 dev_err(&dev->dev, "no I/O memory resource defined\n");
1816                 retval = -ENODEV;
1817                 goto err_get_cap_regs;
1818         }
1819
1820         u3d->cap_regs = (struct mv_u3d_cap_regs __iomem *)
1821                 ioremap(r->start, resource_size(r));
1822         if (!u3d->cap_regs) {
1823                 dev_err(&dev->dev, "failed to map I/O memory\n");
1824                 retval = -EBUSY;
1825                 goto err_map_cap_regs;
1826         } else {
1827                 dev_dbg(&dev->dev, "cap_regs address: 0x%lx/0x%lx\n",
1828                         (unsigned long) r->start,
1829                         (unsigned long) u3d->cap_regs);
1830         }
1831
1832         /* we will access controller register, so enable the u3d controller */
1833         retval = clk_enable(u3d->clk);
1834         if (retval) {
1835                 dev_err(&dev->dev, "clk_enable error %d\n", retval);
1836                 goto err_u3d_enable;
1837         }
1838
1839         if (pdata->phy_init) {
1840                 retval = pdata->phy_init(u3d->phy_regs);
1841                 if (retval) {
1842                         dev_err(&dev->dev, "init phy error %d\n", retval);
1843                         clk_disable(u3d->clk);
1844                         goto err_phy_init;
1845                 }
1846         }
1847
1848         u3d->op_regs = (struct mv_u3d_op_regs __iomem *)(u3d->cap_regs
1849                 + MV_U3D_USB3_OP_REGS_OFFSET);
1850
1851         u3d->vuc_regs = (struct mv_u3d_vuc_regs __iomem *)(u3d->cap_regs
1852                 + ioread32(&u3d->cap_regs->vuoff));
1853
1854         u3d->max_eps = 16;
1855
1856         /*
1857          * Some platforms use USB to download an image and may not disconnect
1858          * the USB gadget before loading the kernel, so stop u3d first here.
1859          */
1860         mv_u3d_controller_stop(u3d);
1861         iowrite32(0xFFFFFFFF, &u3d->vuc_regs->intrcause);
1862
1863         if (pdata->phy_deinit)
1864                 pdata->phy_deinit(u3d->phy_regs);
1865         clk_disable(u3d->clk);
1866
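        /* one IN and one OUT context per endpoint, rounded up to the required alignment */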
1867         size = u3d->max_eps * sizeof(struct mv_u3d_ep_context) * 2;
1868         size = (size + MV_U3D_EP_CONTEXT_ALIGNMENT - 1)
1869                 & ~(MV_U3D_EP_CONTEXT_ALIGNMENT - 1);
1870         u3d->ep_context = dma_alloc_coherent(&dev->dev, size,
1871                                         &u3d->ep_context_dma, GFP_KERNEL);
1872         if (!u3d->ep_context) {
1873                 dev_err(&dev->dev, "allocate ep context memory failed\n");
1874                 retval = -ENOMEM;
1875                 goto err_alloc_ep_context;
1876         }
1877         u3d->ep_context_size = size;
1878
1879         /* create TRB dma_pool resource */
1880         u3d->trb_pool = dma_pool_create("u3d_trb",
1881                         &dev->dev,
1882                         sizeof(struct mv_u3d_trb_hw),
1883                         MV_U3D_TRB_ALIGNMENT,
1884                         MV_U3D_DMA_BOUNDARY);
1885
1886         if (!u3d->trb_pool) {
1887                 retval = -ENOMEM;
1888                 goto err_alloc_trb_pool;
1889         }
1890
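        /* one struct mv_u3d_ep per direction of each endpoint */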
1891         size = u3d->max_eps * sizeof(struct mv_u3d_ep) * 2;
1892         u3d->eps = kzalloc(size, GFP_KERNEL);
1893         if (!u3d->eps) {
1894                 retval = -ENOMEM;
1895                 goto err_alloc_eps;
1896         }
1897
1898         /* initialize ep0 status request structure */
1899         u3d->status_req = kzalloc(sizeof(struct mv_u3d_req) + 8, GFP_KERNEL);
1900         if (!u3d->status_req) {
1901                 retval = -ENOMEM;
1902                 goto err_alloc_status_req;
1903         }
1904         INIT_LIST_HEAD(&u3d->status_req->queue);
1905
1906         /* the 8 bytes allocated after the request struct serve as its buffer */
1907         u3d->status_req->req.buf = (char *)u3d->status_req
1908                                         + sizeof(struct mv_u3d_req);
1909         u3d->status_req->req.dma = virt_to_phys(u3d->status_req->req.buf);
1910
1911         u3d->resume_state = USB_STATE_NOTATTACHED;
1912         u3d->usb_state = USB_STATE_ATTACHED;
1913         u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
1914         u3d->remote_wakeup = 0;
1915
1916         r = platform_get_resource(dev, IORESOURCE_IRQ, 0);
1917         if (!r) {
1918                 dev_err(&dev->dev, "no IRQ resource defined\n");
1919                 retval = -ENODEV;
1920                 goto err_get_irq;
1921         }
1922         u3d->irq = r->start;
1923
1924         /* initialize gadget structure */
1925         u3d->gadget.ops = &mv_u3d_ops;  /* usb_gadget_ops */
1926         u3d->gadget.ep0 = &u3d->eps[1].ep;      /* gadget ep0 */
1927         INIT_LIST_HEAD(&u3d->gadget.ep_list);   /* ep_list */
1928         u3d->gadget.speed = USB_SPEED_UNKNOWN;  /* speed */
1929
1930         /* the "gadget" abstracts/virtualizes the controller */
1931         u3d->gadget.name = driver_name;         /* gadget name */
1932
1933         mv_u3d_eps_init(u3d);
1934
1935         if (request_irq(u3d->irq, mv_u3d_irq,
1936                 IRQF_SHARED, driver_name, u3d)) {
1937                 dev_err(&dev->dev, "Request irq %d for u3d failed\n",
1938                         u3d->irq);
1939                 u3d->irq = 0;
1940                 retval = -ENODEV;
1941                 goto err_request_irq;
1942         }
1943
1944         /* external vbus detection */
1945         if (u3d->vbus) {
1946                 u3d->clock_gating = 1;
1947                 dev_err(&dev->dev, "external vbus detection\n");
1948         }
1949
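        /* without clock gating the controller stays powered, so treat the vbus session as active */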
1950         if (!u3d->clock_gating)
1951                 u3d->vbus_active = 1;
1952
1953         /* enable usb3 controller vbus detection */
1954         u3d->vbus_valid_detect = 1;
1955
1956         retval = usb_add_gadget_udc(&dev->dev, &u3d->gadget);
1957         if (retval)
1958                 goto err_unregister;
1959
1960         dev_dbg(&dev->dev, "usb3 device probed successfully, %s clock gating\n",
1961                 u3d->clock_gating ? "with" : "without");
1962
1963         return 0;
1964
1965 err_unregister:
1966         free_irq(u3d->irq, u3d);
1967 err_get_irq:
1968 err_request_irq:
1969         kfree(u3d->status_req);
1970 err_alloc_status_req:
1971         kfree(u3d->eps);
1972 err_alloc_eps:
1973         dma_pool_destroy(u3d->trb_pool);
1974 err_alloc_trb_pool:
1975         dma_free_coherent(&dev->dev, u3d->ep_context_size,
1976                 u3d->ep_context, u3d->ep_context_dma);
1977 err_alloc_ep_context:
1978 err_phy_init:
1979 err_u3d_enable:
1980         iounmap(u3d->cap_regs);
1981 err_map_cap_regs:
1982 err_get_cap_regs:
1983         clk_put(u3d->clk);
1984 err_get_clk:
1985         kfree(u3d);
1986 err_alloc_private:
1987 err_pdata:
1988         return retval;
1989 }
1990
1991 #ifdef CONFIG_PM_SLEEP
1992 static int mv_u3d_suspend(struct device *dev)
1993 {
1994         struct mv_u3d *u3d = dev_get_drvdata(dev);
1995
1996         /*
1997          * USB can only suspend once the cable is unplugged.
1998          * When clock_gating == 1 this is handled by the vbus session,
1999          * so there is nothing to do here.
2000          */
2001         if (!u3d->clock_gating) {
2002                 mv_u3d_controller_stop(u3d);
2003
2004                 spin_lock_irq(&u3d->lock);
2005                 /* stop all usb activities */
2006                 mv_u3d_stop_activity(u3d, u3d->driver);
2007                 spin_unlock_irq(&u3d->lock);
2008
2009                 mv_u3d_disable(u3d);
2010         }
2011
2012         return 0;
2013 }
2014
2015 static int mv_u3d_resume(struct device *dev)
2016 {
2017         struct mv_u3d *u3d = dev_get_drvdata(dev);
2018         int retval;
2019
2020         if (!u3d->clock_gating) {
2021                 retval = mv_u3d_enable(u3d);
2022                 if (retval)
2023                         return retval;
2024
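                /* if a gadget driver is bound and soft-connected, bring the controller back up */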
2025                 if (u3d->driver && u3d->softconnect) {
2026                         mv_u3d_controller_reset(u3d);
2027                         mv_u3d_ep0_reset(u3d);
2028                         mv_u3d_controller_start(u3d);
2029                 }
2030         }
2031
2032         return 0;
2033 }
2034 #endif
2035
2036 static SIMPLE_DEV_PM_OPS(mv_u3d_pm_ops, mv_u3d_suspend, mv_u3d_resume);
2037
2038 static void mv_u3d_shutdown(struct platform_device *dev)
2039 {
2040         struct mv_u3d *u3d = platform_get_drvdata(dev);
2041         u32 tmp;
2042
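        /* clear the RUN/STOP bit so the controller halts on shutdown */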
2043         tmp = ioread32(&u3d->op_regs->usbcmd);
2044         tmp &= ~MV_U3D_CMD_RUN_STOP;
2045         iowrite32(tmp, &u3d->op_regs->usbcmd);
2046 }
2047
2048 static struct platform_driver mv_u3d_driver = {
2049         .probe          = mv_u3d_probe,
2050         .remove_new     = mv_u3d_remove,
2051         .shutdown       = mv_u3d_shutdown,
2052         .driver         = {
2053                 .name   = "mv-u3d",
2054                 .pm     = &mv_u3d_pm_ops,
2055         },
2056 };
2057
2058 module_platform_driver(mv_u3d_driver);
2059 MODULE_ALIAS("platform:mv-u3d");
2060 MODULE_DESCRIPTION(DRIVER_DESC);
2061 MODULE_AUTHOR("Yu Xu <yuxu@marvell.com>");
2062 MODULE_LICENSE("GPL");