GNU Linux-libre 5.19-rc6-gnu
drivers/usb/gadget/udc/mv_u3d_core.c
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2011 Marvell International Ltd. All rights reserved.
4  */
5
6 #include <linux/module.h>
7 #include <linux/dma-mapping.h>
8 #include <linux/dmapool.h>
9 #include <linux/kernel.h>
10 #include <linux/delay.h>
11 #include <linux/ioport.h>
12 #include <linux/sched.h>
13 #include <linux/slab.h>
14 #include <linux/errno.h>
15 #include <linux/timer.h>
16 #include <linux/list.h>
17 #include <linux/notifier.h>
18 #include <linux/interrupt.h>
19 #include <linux/moduleparam.h>
20 #include <linux/device.h>
21 #include <linux/usb/ch9.h>
22 #include <linux/usb/gadget.h>
23 #include <linux/pm.h>
24 #include <linux/io.h>
25 #include <linux/irq.h>
26 #include <linux/platform_device.h>
27 #include <linux/platform_data/mv_usb.h>
28 #include <linux/clk.h>
29
30 #include "mv_u3d.h"
31
32 #define DRIVER_DESC             "Marvell PXA USB3.0 Device Controller driver"
33
34 static const char driver_name[] = "mv_u3d";
35
36 static void mv_u3d_nuke(struct mv_u3d_ep *ep, int status);
37 static void mv_u3d_stop_activity(struct mv_u3d *u3d,
38                         struct usb_gadget_driver *driver);
39
40 /* for endpoint 0 operations */
41 static const struct usb_endpoint_descriptor mv_u3d_ep0_desc = {
42         .bLength =              USB_DT_ENDPOINT_SIZE,
43         .bDescriptorType =      USB_DT_ENDPOINT,
44         .bEndpointAddress =     0,
45         .bmAttributes =         USB_ENDPOINT_XFER_CONTROL,
46         .wMaxPacketSize =       MV_U3D_EP0_MAX_PKT_SIZE,
47 };
48
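/*
 * mv_u3d_ep0_reset() - re-initialize endpoint 0.
 * Both ep0 directions are pointed at the shared ep context (slot 1),
 * EP_INIT is pulsed on the ep0 OUT and IN control registers to reset
 * the endpoint state machines, and max packet size, burst size, enable
 * bit and control type are re-programmed.
 */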
49 static void mv_u3d_ep0_reset(struct mv_u3d *u3d)
50 {
51         struct mv_u3d_ep *ep;
52         u32 epxcr;
53         int i;
54
55         for (i = 0; i < 2; i++) {
56                 ep = &u3d->eps[i];
57                 ep->u3d = u3d;
58
59                 /* ep0 ep context, ep0 in and out share the same ep context */
60                 ep->ep_context = &u3d->ep_context[1];
61         }
62
63         /* reset ep state machine */
64         /* reset ep0 out */
65         epxcr = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
66         epxcr |= MV_U3D_EPXCR_EP_INIT;
67         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr0);
68         udelay(5);
69         epxcr &= ~MV_U3D_EPXCR_EP_INIT;
70         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr0);
71
72         epxcr = ((MV_U3D_EP0_MAX_PKT_SIZE
73                 << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
74                 | (1 << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
75                 | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
76                 | MV_U3D_EPXCR_EP_TYPE_CONTROL);
77         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr1);
78
79         /* reset ep0 in */
80         epxcr = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
81         epxcr |= MV_U3D_EPXCR_EP_INIT;
82         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr0);
83         udelay(5);
84         epxcr &= ~MV_U3D_EPXCR_EP_INIT;
85         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr0);
86
87         epxcr = ((MV_U3D_EP0_MAX_PKT_SIZE
88                 << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
89                 | (1 << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
90                 | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
91                 | MV_U3D_EPXCR_EP_TYPE_CONTROL);
92         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr1);
93 }
94
95 static void mv_u3d_ep0_stall(struct mv_u3d *u3d)
96 {
97         u32 tmp;
98         dev_dbg(u3d->dev, "%s\n", __func__);
99
100         /* set TX and RX to stall */
101         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
102         tmp |= MV_U3D_EPXCR_EP_HALT;
103         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
104
105         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
106         tmp |= MV_U3D_EPXCR_EP_HALT;
107         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
108
109         /* update ep0 state */
110         u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
111         u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
112 }
113
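/*
 * mv_u3d_process_ep_req() - reclaim the TRBs of a completed request.
 * Walks the request's TRB list, reads the per-endpoint transfer status
 * register and accounts the remaining (untransferred) length so that
 * req->req.actual can be updated.  Returns 0 on success, 1 if a TRB's
 * own bit is unexpectedly clear, or -EPROTO on a transfer error.
 */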
114 static int mv_u3d_process_ep_req(struct mv_u3d *u3d, int index,
115         struct mv_u3d_req *curr_req)
116 {
117         struct mv_u3d_trb       *curr_trb;
118         int actual, remaining_length = 0;
119         int direction, ep_num;
120         int retval = 0;
121         u32 tmp, status, length;
122
123         direction = index % 2;
124         ep_num = index / 2;
125
126         actual = curr_req->req.length;
127
128         while (!list_empty(&curr_req->trb_list)) {
129                 curr_trb = list_entry(curr_req->trb_list.next,
130                                         struct mv_u3d_trb, trb_list);
131                 if (!curr_trb->trb_hw->ctrl.own) {
132                         dev_err(u3d->dev, "%s, TRB own error!\n",
133                                 u3d->eps[index].name);
134                         return 1;
135                 }
136
137                 curr_trb->trb_hw->ctrl.own = 0;
138                 if (direction == MV_U3D_EP_DIR_OUT)
139                         tmp = ioread32(&u3d->vuc_regs->rxst[ep_num].statuslo);
140                 else
141                         tmp = ioread32(&u3d->vuc_regs->txst[ep_num].statuslo);
142
143                 status = tmp >> MV_U3D_XFERSTATUS_COMPLETE_SHIFT;
144                 length = tmp & MV_U3D_XFERSTATUS_TRB_LENGTH_MASK;
145
146                 if (status == MV_U3D_COMPLETE_SUCCESS ||
147                         (status == MV_U3D_COMPLETE_SHORT_PACKET &&
148                         direction == MV_U3D_EP_DIR_OUT)) {
149                         remaining_length += length;
150                         actual -= remaining_length;
151                 } else {
152                         dev_err(u3d->dev,
153                                 "complete_tr error: ep=%d %s: error = 0x%x\n",
154                                 index >> 1, direction ? "SEND" : "RECV",
155                                 status);
156                         retval = -EPROTO;
157                 }
158
159                 list_del_init(&curr_trb->trb_list);
160         }
161         if (retval)
162                 return retval;
163
164         curr_req->req.actual = actual;
165         return 0;
166 }
167
168 /*
169  * mv_u3d_done() - retire a request; caller blocked irqs
170  * @status : request status to be set, only works when
171  * request is still in progress.
172  */
173 static
174 void mv_u3d_done(struct mv_u3d_ep *ep, struct mv_u3d_req *req, int status)
175         __releases(&ep->udc->lock)
176         __acquires(&ep->udc->lock)
177 {
178         struct mv_u3d *u3d = (struct mv_u3d *)ep->u3d;
179
180         dev_dbg(u3d->dev, "mv_u3d_done: remove req->queue\n");
181         /* Remove the req from the ep queue */
182         list_del_init(&req->queue);
183
184         /* req.status should be set as -EINPROGRESS in ep_queue() */
185         if (req->req.status == -EINPROGRESS)
186                 req->req.status = status;
187         else
188                 status = req->req.status;
189
190         /* Free trb for the request */
191         if (!req->chain)
192                 dma_pool_free(u3d->trb_pool,
193                         req->trb_head->trb_hw, req->trb_head->trb_dma);
194         else {
195                 dma_unmap_single(ep->u3d->gadget.dev.parent,
196                         (dma_addr_t)req->trb_head->trb_dma,
197                         req->trb_count * sizeof(struct mv_u3d_trb_hw),
198                         DMA_BIDIRECTIONAL);
199                 kfree(req->trb_head->trb_hw);
200         }
201         kfree(req->trb_head);
202
203         usb_gadget_unmap_request(&u3d->gadget, &req->req, mv_u3d_ep_dir(ep));
204
205         if (status && (status != -ESHUTDOWN)) {
206                 dev_dbg(u3d->dev, "complete %s req %p stat %d len %u/%u",
207                         ep->ep.name, &req->req, status,
208                         req->req.actual, req->req.length);
209         }
210
211         spin_unlock(&ep->u3d->lock);
212
213         usb_gadget_giveback_request(&ep->ep, &req->req);
214
215         spin_lock(&ep->u3d->lock);
216 }
217
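/*
 * mv_u3d_queue_trb() - hand a request's TRB chain to the hardware.
 * Writes the DMA address of the first TRB (with the DCS bit set) into
 * the endpoint context and rings the corresponding doorbell.  The
 * endpoint queue must be empty, otherwise -ENOMEM is returned.
 * Called with u3d->lock held.
 */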
218 static int mv_u3d_queue_trb(struct mv_u3d_ep *ep, struct mv_u3d_req *req)
219 {
220         u32 tmp, direction;
221         struct mv_u3d *u3d;
222         struct mv_u3d_ep_context *ep_context;
223         int retval = 0;
224
225         u3d = ep->u3d;
226         direction = mv_u3d_ep_dir(ep);
227
228         /* ep0 in and out share the same ep context, slot 1 */
229         if (ep->ep_num == 0)
230                 ep_context = &(u3d->ep_context[1]);
231         else
232                 ep_context = &(u3d->ep_context[ep->ep_num * 2 + direction]);
233
234         /* check if the pipe is empty or not */
235         if (!list_empty(&ep->queue)) {
236                 dev_err(u3d->dev, "add trb to non-empty queue!\n");
237                 retval = -ENOMEM;
238                 WARN_ON(1);
239         } else {
240                 ep_context->rsvd0 = cpu_to_le32(1);
241                 ep_context->rsvd1 = 0;
242
243                 /* Configure the trb address and set the DCS bit.
244                  * Both DCS bit and own bit in trb should be set.
245                  */
246                 ep_context->trb_addr_lo =
247                         cpu_to_le32(req->trb_head->trb_dma | DCS_ENABLE);
248                 ep_context->trb_addr_hi = 0;
249
250                 /* Ensure that updates to the EP Context will
251                  * occur before ringing the doorbell.
252                  */
253                 wmb();
254
255                 /* ring bell the ep */
256                 if (ep->ep_num == 0)
257                         tmp = 0x1;
258                 else
259                         tmp = ep->ep_num * 2
260                                 + ((direction == MV_U3D_EP_DIR_OUT) ? 0 : 1);
261
262                 iowrite32(tmp, &u3d->op_regs->doorbell);
263         }
264         return retval;
265 }
266
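/*
 * mv_u3d_build_trb_one() - build a single TRB for a short request.
 * Used when the remaining transfer fits in one TRB (at most
 * MV_U3D_EP_MAX_LENGTH_TRANSFER bytes).  The software TRB is allocated
 * with kzalloc() and the hardware TRB from the DMA pool; buffer
 * address, length, direction, type and the own bit are filled in.
 * Returns NULL on allocation failure.
 */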
267 static struct mv_u3d_trb *mv_u3d_build_trb_one(struct mv_u3d_req *req,
268                                 unsigned *length, dma_addr_t *dma)
269 {
270         u32 temp;
271         unsigned int direction;
272         struct mv_u3d_trb *trb;
273         struct mv_u3d_trb_hw *trb_hw;
274         struct mv_u3d *u3d;
275
276         /* how big will this transfer be? */
277         *length = req->req.length - req->req.actual;
278         BUG_ON(*length > (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER);
279
280         u3d = req->ep->u3d;
281
282         trb = kzalloc(sizeof(*trb), GFP_ATOMIC);
283         if (!trb)
284                 return NULL;
285
286         /*
287          * Be careful that no __GFP_HIGHMEM is set,
288          * or we cannot use dma_to_virt.
289          * GFP_KERNEL cannot be used while holding a spinlock.
290          */
291         trb_hw = dma_pool_alloc(u3d->trb_pool, GFP_ATOMIC, dma);
292         if (!trb_hw) {
293                 kfree(trb);
294                 dev_err(u3d->dev,
295                         "%s, dma_pool_alloc fail\n", __func__);
296                 return NULL;
297         }
298         trb->trb_dma = *dma;
299         trb->trb_hw = trb_hw;
300
301         /* initialize buffer page pointers */
302         temp = (u32)(req->req.dma + req->req.actual);
303
304         trb_hw->buf_addr_lo = cpu_to_le32(temp);
305         trb_hw->buf_addr_hi = 0;
306         trb_hw->trb_len = cpu_to_le32(*length);
307         trb_hw->ctrl.own = 1;
308
309         if (req->ep->ep_num == 0)
310                 trb_hw->ctrl.type = TYPE_DATA;
311         else
312                 trb_hw->ctrl.type = TYPE_NORMAL;
313
314         req->req.actual += *length;
315
316         direction = mv_u3d_ep_dir(req->ep);
317         if (direction == MV_U3D_EP_DIR_IN)
318                 trb_hw->ctrl.dir = 1;
319         else
320                 trb_hw->ctrl.dir = 0;
321
322         /* Enable interrupt for the last trb of a request */
323         if (!req->req.no_interrupt)
324                 trb_hw->ctrl.ioc = 1;
325
326         trb_hw->ctrl.chain = 0;
327
328         wmb();
329         return trb;
330 }
331
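/*
 * mv_u3d_build_trb_chain() - fill in one TRB of a multi-TRB chain.
 * Consumes up to MV_U3D_EP_MAX_LENGTH_TRANSFER bytes of the request,
 * programs the pre-allocated hardware TRB and reports via *is_last
 * whether this TRB completes the transfer (taking a requested
 * zero-length packet into account).  The chain bit is set on every
 * TRB except the last one.
 */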
332 static int mv_u3d_build_trb_chain(struct mv_u3d_req *req, unsigned *length,
333                 struct mv_u3d_trb *trb, int *is_last)
334 {
335         u32 temp;
336         unsigned int direction;
337         struct mv_u3d *u3d;
338
339         /* how big will this transfer be? */
340         *length = min(req->req.length - req->req.actual,
341                         (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER);
342
343         u3d = req->ep->u3d;
344
345         trb->trb_dma = 0;
346
347         /* initialize buffer page pointers */
348         temp = (u32)(req->req.dma + req->req.actual);
349
350         trb->trb_hw->buf_addr_lo = cpu_to_le32(temp);
351         trb->trb_hw->buf_addr_hi = 0;
352         trb->trb_hw->trb_len = cpu_to_le32(*length);
353         trb->trb_hw->ctrl.own = 1;
354
355         if (req->ep->ep_num == 0)
356                 trb->trb_hw->ctrl.type = TYPE_DATA;
357         else
358                 trb->trb_hw->ctrl.type = TYPE_NORMAL;
359
360         req->req.actual += *length;
361
362         direction = mv_u3d_ep_dir(req->ep);
363         if (direction == MV_U3D_EP_DIR_IN)
364                 trb->trb_hw->ctrl.dir = 1;
365         else
366                 trb->trb_hw->ctrl.dir = 0;
367
368         /* zlp is needed if req->req.zero is set */
369         if (req->req.zero) {
370                 if (*length == 0 || (*length % req->ep->ep.maxpacket) != 0)
371                         *is_last = 1;
372                 else
373                         *is_last = 0;
374         } else if (req->req.length == req->req.actual)
375                 *is_last = 1;
376         else
377                 *is_last = 0;
378
379         /* Enable interrupt for the last trb of a request */
380         if (*is_last && !req->req.no_interrupt)
381                 trb->trb_hw->ctrl.ioc = 1;
382
383         if (*is_last)
384                 trb->trb_hw->ctrl.chain = 0;
385         else {
386                 trb->trb_hw->ctrl.chain = 1;
387                 dev_dbg(u3d->dev, "chain trb\n");
388         }
389
390         wmb();
391
392         return 0;
393 }
394
395 /* generate TRB linked list for a request
396  * the usb controller only supports a contiguous trb chain,
397  * i.e. the trb structures must be physically contiguous.
398  */
399 static int mv_u3d_req_to_trb(struct mv_u3d_req *req)
400 {
401         unsigned count;
402         int is_last;
403         struct mv_u3d_trb *trb;
404         struct mv_u3d_trb_hw *trb_hw;
405         struct mv_u3d *u3d;
406         dma_addr_t dma;
407         unsigned length;
408         unsigned trb_num;
409
410         u3d = req->ep->u3d;
411
412         INIT_LIST_HEAD(&req->trb_list);
413
414         length = req->req.length - req->req.actual;
415         /* normally the request transfer length is less than 16KB.
416          * we use mv_u3d_build_trb_one() to optimize that case.
417          */
418         if (length <= (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER) {
419                 trb = mv_u3d_build_trb_one(req, &count, &dma);
                    if (!trb)
                            return -ENOMEM;
420                 list_add_tail(&trb->trb_list, &req->trb_list);
421                 req->trb_head = trb;
422                 req->trb_count = 1;
423                 req->chain = 0;
424         } else {
425                 trb_num = length / MV_U3D_EP_MAX_LENGTH_TRANSFER;
426                 if (length % MV_U3D_EP_MAX_LENGTH_TRANSFER)
427                         trb_num++;
428
429                 trb = kcalloc(trb_num, sizeof(*trb), GFP_ATOMIC);
430                 if (!trb)
431                         return -ENOMEM;
432
433                 trb_hw = kcalloc(trb_num, sizeof(*trb_hw), GFP_ATOMIC);
434                 if (!trb_hw) {
435                         kfree(trb);
436                         return -ENOMEM;
437                 }
438
439                 do {
440                         trb->trb_hw = trb_hw;
441                         if (mv_u3d_build_trb_chain(req, &count,
442                                                 trb, &is_last)) {
443                                 dev_err(u3d->dev,
444                                         "%s, mv_u3d_build_trb_chain fail\n",
445                                         __func__);
446                                 return -EIO;
447                         }
448
449                         list_add_tail(&trb->trb_list, &req->trb_list);
450                         req->trb_count++;
451                         trb++;
452                         trb_hw++;
453                 } while (!is_last);
454
455                 req->trb_head = list_entry(req->trb_list.next,
456                                         struct mv_u3d_trb, trb_list);
457                 req->trb_head->trb_dma = dma_map_single(u3d->gadget.dev.parent,
458                                         req->trb_head->trb_hw,
459                                         trb_num * sizeof(*trb_hw),
460                                         DMA_BIDIRECTIONAL);
461                 if (dma_mapping_error(u3d->gadget.dev.parent,
462                                         req->trb_head->trb_dma)) {
463                         kfree(req->trb_head->trb_hw);
464                         kfree(req->trb_head);
465                         return -EFAULT;
466                 }
467
468                 req->chain = 1;
469         }
470
471         return 0;
472 }
473
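/*
 * mv_u3d_start_queue() - kick off the next request on an endpoint.
 * If the endpoint is idle and its software request list is not empty,
 * the request is DMA-mapped, its TRB list is built and it is queued to
 * the hardware.  Called with u3d->lock held.
 */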
474 static int
475 mv_u3d_start_queue(struct mv_u3d_ep *ep)
476 {
477         struct mv_u3d *u3d = ep->u3d;
478         struct mv_u3d_req *req;
479         int ret;
480
481         if (!list_empty(&ep->req_list) && !ep->processing)
482                 req = list_entry(ep->req_list.next, struct mv_u3d_req, list);
483         else
484                 return 0;
485
486         ep->processing = 1;
487
488         /* set up dma mapping */
489         ret = usb_gadget_map_request(&u3d->gadget, &req->req,
490                                         mv_u3d_ep_dir(ep));
491         if (ret)
492                 goto break_processing;
493
494         req->req.status = -EINPROGRESS;
495         req->req.actual = 0;
496         req->trb_count = 0;
497
498         /* build trbs */
499         ret = mv_u3d_req_to_trb(req);
500         if (ret) {
501                 dev_err(u3d->dev, "%s, mv_u3d_req_to_trb fail\n", __func__);
502                 goto break_processing;
503         }
504
505         /* and push them to device queue */
506         ret = mv_u3d_queue_trb(ep, req);
507         if (ret)
508                 goto break_processing;
509
510         /* irq handler advances the queue */
511         list_add_tail(&req->queue, &ep->queue);
512
513         return 0;
514
515 break_processing:
516         ep->processing = 0;
517         return ret;
518 }
519
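/*
 * mv_u3d_ep_enable() - configure and enable an endpoint.
 * Validates the descriptor, clamps the burst size according to the
 * transfer type, pulses EP_INIT to reset the endpoint state machine
 * and programs max packet size, burst size, enable bit and endpoint
 * type into the direction-specific control registers.
 */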
520 static int mv_u3d_ep_enable(struct usb_ep *_ep,
521                 const struct usb_endpoint_descriptor *desc)
522 {
523         struct mv_u3d *u3d;
524         struct mv_u3d_ep *ep;
525         u16 max = 0;
526         unsigned maxburst = 0;
527         u32 epxcr, direction;
528
529         if (!_ep || !desc || desc->bDescriptorType != USB_DT_ENDPOINT)
530                 return -EINVAL;
531
532         ep = container_of(_ep, struct mv_u3d_ep, ep);
533         u3d = ep->u3d;
534
535         if (!u3d->driver || u3d->gadget.speed == USB_SPEED_UNKNOWN)
536                 return -ESHUTDOWN;
537
538         direction = mv_u3d_ep_dir(ep);
539         max = le16_to_cpu(desc->wMaxPacketSize);
540
541         if (!_ep->maxburst)
542                 _ep->maxburst = 1;
543         maxburst = _ep->maxburst;
544
545         /* Set the max burst size */
546         switch (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK) {
547         case USB_ENDPOINT_XFER_BULK:
548                 if (maxburst > 16) {
549                         dev_dbg(u3d->dev,
550                                 "max burst should not be greater "
551                                 "than 16 on bulk ep\n");
552                         maxburst = 1;
553                         _ep->maxburst = maxburst;
554                 }
555                 dev_dbg(u3d->dev,
556                         "maxburst: %d on bulk %s\n", maxburst, ep->name);
557                 break;
558         case USB_ENDPOINT_XFER_CONTROL:
559                 /* control transfer only supports maxburst as one */
560                 maxburst = 1;
561                 _ep->maxburst = maxburst;
562                 break;
563         case USB_ENDPOINT_XFER_INT:
564                 if (maxburst != 1) {
565                         dev_dbg(u3d->dev,
566                                 "max burst should be 1 on int ep "
567                                 "if transfer size is not 1024\n");
568                         maxburst = 1;
569                         _ep->maxburst = maxburst;
570                 }
571                 break;
572         case USB_ENDPOINT_XFER_ISOC:
573                 if (maxburst != 1) {
574                         dev_dbg(u3d->dev,
575                                 "max burst should be 1 on isoc ep "
576                                 "if transfer size is not 1024\n");
577                         maxburst = 1;
578                         _ep->maxburst = maxburst;
579                 }
580                 break;
581         default:
582                 goto en_done;
583         }
584
585         ep->ep.maxpacket = max;
586         ep->ep.desc = desc;
587         ep->enabled = 1;
588
589         /* Enable the endpoint for Rx or Tx and set the endpoint type */
590         if (direction == MV_U3D_EP_DIR_OUT) {
591                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
592                 epxcr |= MV_U3D_EPXCR_EP_INIT;
593                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
594                 udelay(5);
595                 epxcr &= ~MV_U3D_EPXCR_EP_INIT;
596                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
597
598                 epxcr = ((max << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
599                       | ((maxburst - 1) << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
600                       | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
601                       | (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK));
602                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
603         } else {
604                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
605                 epxcr |= MV_U3D_EPXCR_EP_INIT;
606                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
607                 udelay(5);
608                 epxcr &= ~MV_U3D_EPXCR_EP_INIT;
609                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
610
611                 epxcr = ((max << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
612                       | ((maxburst - 1) << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
613                       | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
614                       | (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK));
615                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
616         }
617
618         return 0;
619 en_done:
620         return -EINVAL;
621 }
622
623 static int  mv_u3d_ep_disable(struct usb_ep *_ep)
624 {
625         struct mv_u3d *u3d;
626         struct mv_u3d_ep *ep;
627         u32 epxcr, direction;
628         unsigned long flags;
629
630         if (!_ep)
631                 return -EINVAL;
632
633         ep = container_of(_ep, struct mv_u3d_ep, ep);
634         if (!ep->ep.desc)
635                 return -EINVAL;
636
637         u3d = ep->u3d;
638
639         direction = mv_u3d_ep_dir(ep);
640
641         /* nuke all pending requests (does flush) */
642         spin_lock_irqsave(&u3d->lock, flags);
643         mv_u3d_nuke(ep, -ESHUTDOWN);
644         spin_unlock_irqrestore(&u3d->lock, flags);
645
646         /* Disable the endpoint for Rx or Tx and reset the endpoint type */
647         if (direction == MV_U3D_EP_DIR_OUT) {
648                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
649                 epxcr &= ~((1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
650                       | USB_ENDPOINT_XFERTYPE_MASK);
651                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
652         } else {
653                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
654                 epxcr &= ~((1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
655                       | USB_ENDPOINT_XFERTYPE_MASK);
656                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
657         }
658
659         ep->enabled = 0;
660
661         ep->ep.desc = NULL;
662         return 0;
663 }
664
665 static struct usb_request *
666 mv_u3d_alloc_request(struct usb_ep *_ep, gfp_t gfp_flags)
667 {
668         struct mv_u3d_req *req = NULL;
669
670         req = kzalloc(sizeof *req, gfp_flags);
671         if (!req)
672                 return NULL;
673
674         INIT_LIST_HEAD(&req->queue);
675
676         return &req->req;
677 }
678
679 static void mv_u3d_free_request(struct usb_ep *_ep, struct usb_request *_req)
680 {
681         struct mv_u3d_req *req = container_of(_req, struct mv_u3d_req, req);
682
683         kfree(req);
684 }
685
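/*
 * mv_u3d_ep_fifo_flush() - flush an endpoint FIFO.
 * For ep0 the EP_FLUSH bit is set and cleared again after a short
 * delay; for other endpoints EP_FLUSH is set and the register is
 * polled until the hardware clears the bit or MV_U3D_FLUSH_TIMEOUT
 * expires.
 */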
686 static void mv_u3d_ep_fifo_flush(struct usb_ep *_ep)
687 {
688         struct mv_u3d *u3d;
689         u32 direction;
690         struct mv_u3d_ep *ep = container_of(_ep, struct mv_u3d_ep, ep);
691         unsigned int loops;
692         u32 tmp;
693
694         /* if endpoint is not enabled, cannot flush endpoint */
695         if (!ep->enabled)
696                 return;
697
698         u3d = ep->u3d;
699         direction = mv_u3d_ep_dir(ep);
700
701         /* ep0 needs to clear the flush bit after flushing the fifo. */
702         if (!ep->ep_num) {
703                 if (direction == MV_U3D_EP_DIR_OUT) {
704                         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
705                         tmp |= MV_U3D_EPXCR_EP_FLUSH;
706                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
707                         udelay(10);
708                         tmp &= ~MV_U3D_EPXCR_EP_FLUSH;
709                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
710                 } else {
711                         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
712                         tmp |= MV_U3D_EPXCR_EP_FLUSH;
713                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
714                         udelay(10);
715                         tmp &= ~MV_U3D_EPXCR_EP_FLUSH;
716                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
717                 }
718                 return;
719         }
720
721         if (direction == MV_U3D_EP_DIR_OUT) {
722                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
723                 tmp |= MV_U3D_EPXCR_EP_FLUSH;
724                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
725
726                 /* Wait until flushing completed */
727                 loops = LOOPS(MV_U3D_FLUSH_TIMEOUT);
728                 while (ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0) &
729                         MV_U3D_EPXCR_EP_FLUSH) {
730                         /*
731                          * EP_FLUSH bit should be cleared to indicate this
732                          * operation is complete
733                          */
734                         if (loops == 0) {
735                                 dev_dbg(u3d->dev,
736                                     "EP FLUSH TIMEOUT for ep%d%s\n", ep->ep_num,
737                                     direction ? "in" : "out");
738                                 return;
739                         }
740                         loops--;
741                         udelay(LOOPS_USEC);
742                 }
743         } else {        /* EP_DIR_IN */
744                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
745                 tmp |= MV_U3D_EPXCR_EP_FLUSH;
746                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
747
748                 /* Wait until flushing completed */
749                 loops = LOOPS(MV_U3D_FLUSH_TIMEOUT);
750                 while (ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0) &
751                         MV_U3D_EPXCR_EP_FLUSH) {
752                         /*
753                          * EP_FLUSH bit should be cleared to indicate this
754                          * operation is complete
755                          */
756                         if (loops == 0) {
757                                 dev_dbg(u3d->dev,
758                                     "EP FLUSH TIMEOUT for ep%d%s\n", ep->ep_num,
759                                     direction ? "in" : "out");
760                                 return;
761                         }
762                         loops--;
763                         udelay(LOOPS_USEC);
764                 }
765         }
766 }
767
768 /* queues (submits) an I/O request to an endpoint */
769 static int
770 mv_u3d_ep_queue(struct usb_ep *_ep, struct usb_request *_req, gfp_t gfp_flags)
771 {
772         struct mv_u3d_ep *ep;
773         struct mv_u3d_req *req;
774         struct mv_u3d *u3d;
775         unsigned long flags;
776         int is_first_req = 0;
777
778         if (unlikely(!_ep || !_req))
779                 return -EINVAL;
780
781         ep = container_of(_ep, struct mv_u3d_ep, ep);
782         u3d = ep->u3d;
783
784         req = container_of(_req, struct mv_u3d_req, req);
785
786         if (!ep->ep_num
787                 && u3d->ep0_state == MV_U3D_STATUS_STAGE
788                 && !_req->length) {
789                 dev_dbg(u3d->dev, "ep0 status stage\n");
790                 u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
791                 return 0;
792         }
793
794         dev_dbg(u3d->dev, "%s: %s, req: 0x%p\n",
795                         __func__, _ep->name, req);
796
797         /* catch various bogus parameters */
798         if (!req->req.complete || !req->req.buf
799                         || !list_empty(&req->queue)) {
800                 dev_err(u3d->dev,
801                         "%s, bad params, _req: 0x%p, "
802                         "req->req.complete: 0x%p, req->req.buf: 0x%p, "
803                         "list_empty: 0x%x\n",
804                         __func__, _req,
805                         req->req.complete, req->req.buf,
806                         list_empty(&req->queue));
807                 return -EINVAL;
808         }
809         if (unlikely(!ep->ep.desc)) {
810                 dev_err(u3d->dev, "%s, bad ep\n", __func__);
811                 return -EINVAL;
812         }
813         if (usb_endpoint_xfer_isoc(ep->ep.desc)) {
814                 if (req->req.length > ep->ep.maxpacket)
815                         return -EMSGSIZE;
816         }
817
818         if (!u3d->driver || u3d->gadget.speed == USB_SPEED_UNKNOWN) {
819                 dev_err(u3d->dev,
820                         "bad params of driver/speed\n");
821                 return -ESHUTDOWN;
822         }
823
824         req->ep = ep;
825
826         /* Add the request to the endpoint's software request list. */
827         spin_lock_irqsave(&ep->req_lock, flags);
828         is_first_req = list_empty(&ep->req_list);
829         list_add_tail(&req->list, &ep->req_list);
830         spin_unlock_irqrestore(&ep->req_lock, flags);
831         if (!is_first_req) {
832                 dev_dbg(u3d->dev, "list is not empty\n");
833                 return 0;
834         }
835
836         dev_dbg(u3d->dev, "call mv_u3d_start_queue from usb_ep_queue\n");
837         spin_lock_irqsave(&u3d->lock, flags);
838         mv_u3d_start_queue(ep);
839         spin_unlock_irqrestore(&u3d->lock, flags);
840         return 0;
841 }
842
843 /* dequeues (cancels, unlinks) an I/O request from an endpoint */
844 static int mv_u3d_ep_dequeue(struct usb_ep *_ep, struct usb_request *_req)
845 {
846         struct mv_u3d_ep *ep;
847         struct mv_u3d_req *req = NULL, *iter;
848         struct mv_u3d *u3d;
849         struct mv_u3d_ep_context *ep_context;
850         struct mv_u3d_req *next_req;
851
852         unsigned long flags;
853         int ret = 0;
854
855         if (!_ep || !_req)
856                 return -EINVAL;
857
858         ep = container_of(_ep, struct mv_u3d_ep, ep);
859         u3d = ep->u3d;
860
861         spin_lock_irqsave(&ep->u3d->lock, flags);
862
863         /* make sure it's actually queued on this endpoint */
864         list_for_each_entry(iter, &ep->queue, queue) {
865                 if (&iter->req != _req)
866                         continue;
867                 req = iter;
868                 break;
869         }
870         if (!req) {
871                 ret = -EINVAL;
872                 goto out;
873         }
874
875         /* The request is in progress, or completed but not dequeued */
876         if (ep->queue.next == &req->queue) {
877                 _req->status = -ECONNRESET;
878                 mv_u3d_ep_fifo_flush(_ep);
879
880                 /* The request isn't the last request in this ep queue */
881                 if (req->queue.next != &ep->queue) {
882                         dev_dbg(u3d->dev,
883                                 "it is not the last request in this ep queue\n");
884                         ep_context = ep->ep_context;
885                         next_req = list_entry(req->queue.next,
886                                         struct mv_u3d_req, queue);
887
888                         /* Point first TRB of next request to the EP context. */
889                         iowrite32((unsigned long) next_req->trb_head,
890                                         &ep_context->trb_addr_lo);
891                 } else {
893                         ep_context = ep->ep_context;
894                         ep_context->trb_addr_lo = 0;
895                         ep_context->trb_addr_hi = 0;
896                 }
897
898         } else
899                 WARN_ON(1);
900
901         mv_u3d_done(ep, req, -ECONNRESET);
902
903         /* remove the req from the ep req list */
904         if (!list_empty(&ep->req_list)) {
905                 struct mv_u3d_req *curr_req;
906                 curr_req = list_entry(ep->req_list.next,
907                                         struct mv_u3d_req, list);
908                 if (curr_req == req) {
909                         list_del_init(&req->list);
910                         ep->processing = 0;
911                 }
912         }
913
914 out:
915         spin_unlock_irqrestore(&ep->u3d->lock, flags);
916         return ret;
917 }
918
919 static void
920 mv_u3d_ep_set_stall(struct mv_u3d *u3d, u8 ep_num, u8 direction, int stall)
921 {
922         u32 tmp;
923         struct mv_u3d_ep *ep = u3d->eps;
924
925         dev_dbg(u3d->dev, "%s\n", __func__);
926         if (direction == MV_U3D_EP_DIR_OUT) {
927                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
928                 if (stall)
929                         tmp |= MV_U3D_EPXCR_EP_HALT;
930                 else
931                         tmp &= ~MV_U3D_EPXCR_EP_HALT;
932                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
933         } else {
934                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
935                 if (stall)
936                         tmp |= MV_U3D_EPXCR_EP_HALT;
937                 else
938                         tmp &= ~MV_U3D_EPXCR_EP_HALT;
939                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
940         }
941 }
942
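/*
 * mv_u3d_ep_set_halt_wedge() - halt (and optionally wedge) an endpoint.
 * Isochronous endpoints are rejected, as is halting an IN endpoint
 * that still has queued requests.  The actual stall bit is set or
 * cleared by mv_u3d_ep_set_stall() under u3d->lock.
 */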
943 static int mv_u3d_ep_set_halt_wedge(struct usb_ep *_ep, int halt, int wedge)
944 {
945         struct mv_u3d_ep *ep;
946         unsigned long flags;
947         int status = 0;
948         struct mv_u3d *u3d;
949
950         ep = container_of(_ep, struct mv_u3d_ep, ep);
951         u3d = ep->u3d;
952         if (!ep->ep.desc) {
953                 status = -EINVAL;
954                 goto out;
955         }
956
957         if (usb_endpoint_xfer_isoc(ep->ep.desc)) {
958                 status = -EOPNOTSUPP;
959                 goto out;
960         }
961
962         /*
963          * An attempt to halt an IN ep will fail if any transfer
964          * requests are still queued
965          */
966         if (halt && (mv_u3d_ep_dir(ep) == MV_U3D_EP_DIR_IN)
967                         && !list_empty(&ep->queue)) {
968                 status = -EAGAIN;
969                 goto out;
970         }
971
972         spin_lock_irqsave(&ep->u3d->lock, flags);
973         mv_u3d_ep_set_stall(u3d, ep->ep_num, mv_u3d_ep_dir(ep), halt);
974         if (halt && wedge)
975                 ep->wedge = 1;
976         else if (!halt)
977                 ep->wedge = 0;
978         spin_unlock_irqrestore(&ep->u3d->lock, flags);
979
980         if (ep->ep_num == 0)
981                 u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
982 out:
983         return status;
984 }
985
986 static int mv_u3d_ep_set_halt(struct usb_ep *_ep, int halt)
987 {
988         return mv_u3d_ep_set_halt_wedge(_ep, halt, 0);
989 }
990
991 static int mv_u3d_ep_set_wedge(struct usb_ep *_ep)
992 {
993         return mv_u3d_ep_set_halt_wedge(_ep, 1, 1);
994 }
995
996 static const struct usb_ep_ops mv_u3d_ep_ops = {
997         .enable         = mv_u3d_ep_enable,
998         .disable        = mv_u3d_ep_disable,
999
1000         .alloc_request  = mv_u3d_alloc_request,
1001         .free_request   = mv_u3d_free_request,
1002
1003         .queue          = mv_u3d_ep_queue,
1004         .dequeue        = mv_u3d_ep_dequeue,
1005
1006         .set_wedge      = mv_u3d_ep_set_wedge,
1007         .set_halt       = mv_u3d_ep_set_halt,
1008         .fifo_flush     = mv_u3d_ep_fifo_flush,
1009 };
1010
1011 static void mv_u3d_controller_stop(struct mv_u3d *u3d)
1012 {
1013         u32 tmp;
1014
1015         if (!u3d->clock_gating && u3d->vbus_valid_detect)
1016                 iowrite32(MV_U3D_INTR_ENABLE_VBUS_VALID,
1017                                 &u3d->vuc_regs->intrenable);
1018         else
1019                 iowrite32(0, &u3d->vuc_regs->intrenable);
1020         iowrite32(~0x0, &u3d->vuc_regs->endcomplete);
1021         iowrite32(~0x0, &u3d->vuc_regs->trbunderrun);
1022         iowrite32(~0x0, &u3d->vuc_regs->trbcomplete);
1023         iowrite32(~0x0, &u3d->vuc_regs->linkchange);
1024         iowrite32(0x1, &u3d->vuc_regs->setuplock);
1025
1026         /* Reset the RUN bit in the command register to stop USB */
1027         tmp = ioread32(&u3d->op_regs->usbcmd);
1028         tmp &= ~MV_U3D_CMD_RUN_STOP;
1029         iowrite32(tmp, &u3d->op_regs->usbcmd);
1030         dev_dbg(u3d->dev, "after u3d_stop, USBCMD 0x%x\n",
1031                 ioread32(&u3d->op_regs->usbcmd));
1032 }
1033
1034 static void mv_u3d_controller_start(struct mv_u3d *u3d)
1035 {
1036         u32 usbintr;
1037         u32 temp;
1038
1039         /* enable link LTSSM state machine */
1040         temp = ioread32(&u3d->vuc_regs->ltssm);
1041         temp |= MV_U3D_LTSSM_PHY_INIT_DONE;
1042         iowrite32(temp, &u3d->vuc_regs->ltssm);
1043
1044         /* Enable interrupts */
1045         usbintr = MV_U3D_INTR_ENABLE_LINK_CHG | MV_U3D_INTR_ENABLE_TXDESC_ERR |
1046                 MV_U3D_INTR_ENABLE_RXDESC_ERR | MV_U3D_INTR_ENABLE_TX_COMPLETE |
1047                 MV_U3D_INTR_ENABLE_RX_COMPLETE | MV_U3D_INTR_ENABLE_SETUP |
1048                 (u3d->vbus_valid_detect ? MV_U3D_INTR_ENABLE_VBUS_VALID : 0);
1049         iowrite32(usbintr, &u3d->vuc_regs->intrenable);
1050
1051         /* Enable ctrl ep */
1052         iowrite32(0x1, &u3d->vuc_regs->ctrlepenable);
1053
1054         /* Set the Run bit in the command register */
1055         iowrite32(MV_U3D_CMD_RUN_STOP, &u3d->op_regs->usbcmd);
1056         dev_dbg(u3d->dev, "after u3d_start, USBCMD 0x%x\n",
1057                 ioread32(&u3d->op_regs->usbcmd));
1058 }
1059
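/*
 * mv_u3d_controller_reset() - soft-reset the controller.
 * Clears the RUN bit, issues CTRL_RESET and polls until the reset bit
 * self-clears (or MV_U3D_RESET_TIMEOUT expires), then programs the
 * endpoint context array base address registers.
 */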
1060 static int mv_u3d_controller_reset(struct mv_u3d *u3d)
1061 {
1062         unsigned int loops;
1063         u32 tmp;
1064
1065         /* Stop the controller */
1066         tmp = ioread32(&u3d->op_regs->usbcmd);
1067         tmp &= ~MV_U3D_CMD_RUN_STOP;
1068         iowrite32(tmp, &u3d->op_regs->usbcmd);
1069
1070         /* Reset the controller to get default values */
1071         iowrite32(MV_U3D_CMD_CTRL_RESET, &u3d->op_regs->usbcmd);
1072
1073         /* wait for reset to complete */
1074         loops = LOOPS(MV_U3D_RESET_TIMEOUT);
1075         while (ioread32(&u3d->op_regs->usbcmd) & MV_U3D_CMD_CTRL_RESET) {
1076                 if (loops == 0) {
1077                         dev_err(u3d->dev,
1078                                 "Wait for RESET completed TIMEOUT\n");
1079                         return -ETIMEDOUT;
1080                 }
1081                 loops--;
1082                 udelay(LOOPS_USEC);
1083         }
1084
1085         /* Configure the Endpoint Context Address */
1086         iowrite32(u3d->ep_context_dma, &u3d->op_regs->dcbaapl);
1087         iowrite32(0, &u3d->op_regs->dcbaaph);
1088
1089         return 0;
1090 }
1091
1092 static int mv_u3d_enable(struct mv_u3d *u3d)
1093 {
1094         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1095         int retval;
1096
1097         if (u3d->active)
1098                 return 0;
1099
1100         if (!u3d->clock_gating) {
1101                 u3d->active = 1;
1102                 return 0;
1103         }
1104
1105         dev_dbg(u3d->dev, "enable u3d\n");
1106         clk_enable(u3d->clk);
1107         if (pdata->phy_init) {
1108                 retval = pdata->phy_init(u3d->phy_regs);
1109                 if (retval) {
1110                         dev_err(u3d->dev,
1111                                 "init phy error %d\n", retval);
1112                         clk_disable(u3d->clk);
1113                         return retval;
1114                 }
1115         }
1116         u3d->active = 1;
1117
1118         return 0;
1119 }
1120
1121 static void mv_u3d_disable(struct mv_u3d *u3d)
1122 {
1123         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1124         if (u3d->clock_gating && u3d->active) {
1125                 dev_dbg(u3d->dev, "disable u3d\n");
1126                 if (pdata->phy_deinit)
1127                         pdata->phy_deinit(u3d->phy_regs);
1128                 clk_disable(u3d->clk);
1129                 u3d->active = 0;
1130         }
1131 }
1132
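/*
 * mv_u3d_vbus_session() - handle a VBUS state change from the gadget core.
 * When VBUS becomes valid with a bound driver and softconnect set, the
 * controller is re-enabled, reset and started; when VBUS goes away all
 * queued transfers are flushed and the controller is stopped and, with
 * clock gating, powered down.
 */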
1133 static int mv_u3d_vbus_session(struct usb_gadget *gadget, int is_active)
1134 {
1135         struct mv_u3d *u3d;
1136         unsigned long flags;
1137         int retval = 0;
1138
1139         u3d = container_of(gadget, struct mv_u3d, gadget);
1140
1141         spin_lock_irqsave(&u3d->lock, flags);
1142
1143         u3d->vbus_active = (is_active != 0);
1144         dev_dbg(u3d->dev, "%s: softconnect %d, vbus_active %d\n",
1145                 __func__, u3d->softconnect, u3d->vbus_active);
1146         /*
1147          * 1. external VBUS detect: we can disable/enable clock on demand.
1148          * 2. UDC VBUS detect: we have to enable clock all the time.
1149          * 3. No VBUS detect: we have to enable clock all the time.
1150          */
1151         if (u3d->driver && u3d->softconnect && u3d->vbus_active) {
1152                 retval = mv_u3d_enable(u3d);
1153                 if (retval == 0) {
1154                         /*
1155                          * after the clock is disabled, all register
1156                          * context is lost. We have to re-init the registers.
1157                          */
1158                         mv_u3d_controller_reset(u3d);
1159                         mv_u3d_ep0_reset(u3d);
1160                         mv_u3d_controller_start(u3d);
1161                 }
1162         } else if (u3d->driver && u3d->softconnect) {
1163                 if (!u3d->active)
1164                         goto out;
1165
1166                 /* stop all the transfers in the queue */
1167                 mv_u3d_stop_activity(u3d, u3d->driver);
1168                 mv_u3d_controller_stop(u3d);
1169                 mv_u3d_disable(u3d);
1170         }
1171
1172 out:
1173         spin_unlock_irqrestore(&u3d->lock, flags);
1174         return retval;
1175 }
1176
1177 /* constrain controller's VBUS power usage
1178  * This call is used by gadget drivers during SET_CONFIGURATION calls,
1179  * reporting how much power the device may consume.  For example, this
1180  * could affect how quickly batteries are recharged.
1181  *
1182  * Returns zero on success, else negative errno.
1183  */
1184 static int mv_u3d_vbus_draw(struct usb_gadget *gadget, unsigned mA)
1185 {
1186         struct mv_u3d *u3d = container_of(gadget, struct mv_u3d, gadget);
1187
1188         u3d->power = mA;
1189
1190         return 0;
1191 }
1192
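/*
 * mv_u3d_pullup() - software connect/disconnect.
 * Mirrors mv_u3d_vbus_session(): with driver, softconnect and VBUS all
 * active the controller is enabled and started; when softconnect is
 * dropped while VBUS is still present, pending transfers are stopped
 * and the controller is shut down.
 */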
1193 static int mv_u3d_pullup(struct usb_gadget *gadget, int is_on)
1194 {
1195         struct mv_u3d *u3d = container_of(gadget, struct mv_u3d, gadget);
1196         unsigned long flags;
1197         int retval = 0;
1198
1199         spin_lock_irqsave(&u3d->lock, flags);
1200
1201         dev_dbg(u3d->dev, "%s: softconnect %d, vbus_active %d\n",
1202                 __func__, u3d->softconnect, u3d->vbus_active);
1203         u3d->softconnect = (is_on != 0);
1204         if (u3d->driver && u3d->softconnect && u3d->vbus_active) {
1205                 retval = mv_u3d_enable(u3d);
1206                 if (retval == 0) {
1207                         /*
1208                          * after the clock is disabled, all register
1209                          * context is lost. We have to re-init the registers.
1210                          */
1211                         mv_u3d_controller_reset(u3d);
1212                         mv_u3d_ep0_reset(u3d);
1213                         mv_u3d_controller_start(u3d);
1214                 }
1215         } else if (u3d->driver && u3d->vbus_active) {
1216                 /* stop all the transfers in the queue */
1217                 mv_u3d_stop_activity(u3d, u3d->driver);
1218                 mv_u3d_controller_stop(u3d);
1219                 mv_u3d_disable(u3d);
1220         }
1221
1222         spin_unlock_irqrestore(&u3d->lock, flags);
1223
1224         return retval;
1225 }
1226
1227 static int mv_u3d_start(struct usb_gadget *g,
1228                 struct usb_gadget_driver *driver)
1229 {
1230         struct mv_u3d *u3d = container_of(g, struct mv_u3d, gadget);
1231         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1232         unsigned long flags;
1233
1234         if (u3d->driver)
1235                 return -EBUSY;
1236
1237         spin_lock_irqsave(&u3d->lock, flags);
1238
1239         if (!u3d->clock_gating) {
1240                 clk_enable(u3d->clk);
1241                 if (pdata->phy_init)
1242                         pdata->phy_init(u3d->phy_regs);
1243         }
1244
1245         /* hook up the driver ... */
1246         driver->driver.bus = NULL;
1247         u3d->driver = driver;
1248
1249         u3d->ep0_dir = USB_DIR_OUT;
1250
1251         spin_unlock_irqrestore(&u3d->lock, flags);
1252
1253         u3d->vbus_valid_detect = 1;
1254
1255         return 0;
1256 }
1257
1258 static int mv_u3d_stop(struct usb_gadget *g)
1259 {
1260         struct mv_u3d *u3d = container_of(g, struct mv_u3d, gadget);
1261         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1262         unsigned long flags;
1263
1264         u3d->vbus_valid_detect = 0;
1265         spin_lock_irqsave(&u3d->lock, flags);
1266
1267         /* enable clock to access controller register */
1268         clk_enable(u3d->clk);
1269         if (pdata->phy_init)
1270                 pdata->phy_init(u3d->phy_regs);
1271
1272         mv_u3d_controller_stop(u3d);
1273         /* stop all usb activities */
1274         u3d->gadget.speed = USB_SPEED_UNKNOWN;
1275         mv_u3d_stop_activity(u3d, NULL);
1276         mv_u3d_disable(u3d);
1277
1278         if (pdata->phy_deinit)
1279                 pdata->phy_deinit(u3d->phy_regs);
1280         clk_disable(u3d->clk);
1281
1282         spin_unlock_irqrestore(&u3d->lock, flags);
1283
1284         u3d->driver = NULL;
1285
1286         return 0;
1287 }
1288
1289 /* device controller usb_gadget_ops structure */
1290 static const struct usb_gadget_ops mv_u3d_ops = {
1291         /* notify controller that VBUS is powered or not */
1292         .vbus_session   = mv_u3d_vbus_session,
1293
1294         /* constrain controller's VBUS power usage */
1295         .vbus_draw      = mv_u3d_vbus_draw,
1296
1297         .pullup         = mv_u3d_pullup,
1298         .udc_start      = mv_u3d_start,
1299         .udc_stop       = mv_u3d_stop,
1300 };
1301
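/*
 * mv_u3d_eps_init() - set up the endpoint data structures.
 * eps[1] is initialized as the bidirectional control endpoint "ep0"
 * (ep0 IN and OUT share one struct and ep context slot 1); the
 * remaining entries become "ep<n>in"/"ep<n>out" and are linked into
 * the gadget's ep_list.
 */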
1302 static int mv_u3d_eps_init(struct mv_u3d *u3d)
1303 {
1304         struct mv_u3d_ep        *ep;
1305         char name[14];
1306         int i;
1307
1308         /* initialize ep0, ep0 in/out use eps[1] */
1309         ep = &u3d->eps[1];
1310         ep->u3d = u3d;
1311         strncpy(ep->name, "ep0", sizeof(ep->name));
1312         ep->ep.name = ep->name;
1313         ep->ep.ops = &mv_u3d_ep_ops;
1314         ep->wedge = 0;
1315         usb_ep_set_maxpacket_limit(&ep->ep, MV_U3D_EP0_MAX_PKT_SIZE);
1316         ep->ep.caps.type_control = true;
1317         ep->ep.caps.dir_in = true;
1318         ep->ep.caps.dir_out = true;
1319         ep->ep_num = 0;
1320         ep->ep.desc = &mv_u3d_ep0_desc;
1321         INIT_LIST_HEAD(&ep->queue);
1322         INIT_LIST_HEAD(&ep->req_list);
1323         ep->ep_type = USB_ENDPOINT_XFER_CONTROL;
1324
1325         /* add ep0 ep_context */
1326         ep->ep_context = &u3d->ep_context[1];
1327
1328         /* initialize other endpoints */
1329         for (i = 2; i < u3d->max_eps * 2; i++) {
1330                 ep = &u3d->eps[i];
1331                 if (i & 1) {
1332                         snprintf(name, sizeof(name), "ep%din", i >> 1);
1333                         ep->direction = MV_U3D_EP_DIR_IN;
1334                         ep->ep.caps.dir_in = true;
1335                 } else {
1336                         snprintf(name, sizeof(name), "ep%dout", i >> 1);
1337                         ep->direction = MV_U3D_EP_DIR_OUT;
1338                         ep->ep.caps.dir_out = true;
1339                 }
1340                 ep->u3d = u3d;
1341                 strncpy(ep->name, name, sizeof(ep->name));
1342                 ep->ep.name = ep->name;
1343
1344                 ep->ep.caps.type_iso = true;
1345                 ep->ep.caps.type_bulk = true;
1346                 ep->ep.caps.type_int = true;
1347
1348                 ep->ep.ops = &mv_u3d_ep_ops;
1349                 usb_ep_set_maxpacket_limit(&ep->ep, (unsigned short) ~0);
1350                 ep->ep_num = i / 2;
1351
1352                 INIT_LIST_HEAD(&ep->queue);
1353                 list_add_tail(&ep->ep.ep_list, &u3d->gadget.ep_list);
1354
1355                 INIT_LIST_HEAD(&ep->req_list);
1356                 spin_lock_init(&ep->req_lock);
1357                 ep->ep_context = &u3d->ep_context[i];
1358         }
1359
1360         return 0;
1361 }
1362
1363 /* delete all endpoint requests, called with spinlock held */
1364 static void mv_u3d_nuke(struct mv_u3d_ep *ep, int status)
1365 {
1366         /* endpoint fifo flush */
1367         mv_u3d_ep_fifo_flush(&ep->ep);
1368
1369         while (!list_empty(&ep->queue)) {
1370                 struct mv_u3d_req *req = NULL;
1371                 req = list_entry(ep->queue.next, struct mv_u3d_req, queue);
1372                 mv_u3d_done(ep, req, status);
1373         }
1374 }
1375
1376 /* stop all USB activities */
1377 static
1378 void mv_u3d_stop_activity(struct mv_u3d *u3d, struct usb_gadget_driver *driver)
1379 {
1380         struct mv_u3d_ep        *ep;
1381
1382         mv_u3d_nuke(&u3d->eps[1], -ESHUTDOWN);
1383
1384         list_for_each_entry(ep, &u3d->gadget.ep_list, ep.ep_list) {
1385                 mv_u3d_nuke(ep, -ESHUTDOWN);
1386         }
1387
1388         /* report disconnect; the driver is already quiesced */
1389         if (driver) {
1390                 spin_unlock(&u3d->lock);
1391                 driver->disconnect(&u3d->gadget);
1392                 spin_lock(&u3d->lock);
1393         }
1394 }
1395
1396 static void mv_u3d_irq_process_error(struct mv_u3d *u3d)
1397 {
1398         /* Increment the error count */
1399         u3d->errors++;
1400         dev_err(u3d->dev, "%s\n", __func__);
1401 }
1402
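/*
 * mv_u3d_irq_process_link_change() - handle LTSSM link change events.
 * Acknowledges the linkchange bits and updates the USB device state
 * for link-up, suspend, resume, warm/hot reset and VBUS-invalid
 * events; on link-up the gadget speed is set to SuperSpeed.
 */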
1403 static void mv_u3d_irq_process_link_change(struct mv_u3d *u3d)
1404 {
1405         u32 linkchange;
1406
1407         linkchange = ioread32(&u3d->vuc_regs->linkchange);
1408         iowrite32(linkchange, &u3d->vuc_regs->linkchange);
1409
1410         dev_dbg(u3d->dev, "linkchange: 0x%x\n", linkchange);
1411
1412         if (linkchange & MV_U3D_LINK_CHANGE_LINK_UP) {
1413                 dev_dbg(u3d->dev, "link up: ltssm state: 0x%x\n",
1414                         ioread32(&u3d->vuc_regs->ltssmstate));
1415
1416                 u3d->usb_state = USB_STATE_DEFAULT;
1417                 u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
1418                 u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
1419
1420                 /* set speed */
1421                 u3d->gadget.speed = USB_SPEED_SUPER;
1422         }
1423
1424         if (linkchange & MV_U3D_LINK_CHANGE_SUSPEND) {
1425                 dev_dbg(u3d->dev, "link suspend\n");
1426                 u3d->resume_state = u3d->usb_state;
1427                 u3d->usb_state = USB_STATE_SUSPENDED;
1428         }
1429
1430         if (linkchange & MV_U3D_LINK_CHANGE_RESUME) {
1431                 dev_dbg(u3d->dev, "link resume\n");
1432                 u3d->usb_state = u3d->resume_state;
1433                 u3d->resume_state = 0;
1434         }
1435
1436         if (linkchange & MV_U3D_LINK_CHANGE_WRESET) {
1437                 dev_dbg(u3d->dev, "warm reset\n");
1438                 u3d->usb_state = USB_STATE_POWERED;
1439         }
1440
1441         if (linkchange & MV_U3D_LINK_CHANGE_HRESET) {
1442                 dev_dbg(u3d->dev, "hot reset\n");
1443                 u3d->usb_state = USB_STATE_DEFAULT;
1444         }
1445
1446         if (linkchange & MV_U3D_LINK_CHANGE_INACT)
1447                 dev_dbg(u3d->dev, "inactive\n");
1448
1449         if (linkchange & MV_U3D_LINK_CHANGE_DISABLE_AFTER_U0)
1450                 dev_dbg(u3d->dev, "ss.disabled\n");
1451
1452         if (linkchange & MV_U3D_LINK_CHANGE_VBUS_INVALID) {
1453                 dev_dbg(u3d->dev, "vbus invalid\n");
1454                 u3d->usb_state = USB_STATE_ATTACHED;
1455                 u3d->vbus_valid_detect = 1;
1456                 /* if external vbus detect is not supported,
1457                  * we handle it here.
1458                  */
1459                 if (!u3d->vbus) {
1460                         spin_unlock(&u3d->lock);
1461                         mv_u3d_vbus_session(&u3d->gadget, 0);
1462                         spin_lock(&u3d->lock);
1463                 }
1464         }
1465 }
1466
1467 static void mv_u3d_ch9setaddress(struct mv_u3d *u3d,
1468                                 struct usb_ctrlrequest *setup)
1469 {
1470         u32 tmp;
1471
1472         if (u3d->usb_state != USB_STATE_DEFAULT) {
1473                 dev_err(u3d->dev,
1474                         "%s, cannot setaddr in this state (%d)\n",
1475                         __func__, u3d->usb_state);
1476                 goto err;
1477         }
1478
1479         u3d->dev_addr = (u8)setup->wValue;
1480
1481         dev_dbg(u3d->dev, "%s: 0x%x\n", __func__, u3d->dev_addr);
1482
1483         if (u3d->dev_addr > 127) {
1484                 dev_err(u3d->dev,
1485                         "%s, u3d address is wrong (out of range)\n", __func__);
1486                 u3d->dev_addr = 0;
1487                 goto err;
1488         }
1489
1490         /* update usb state */
1491         u3d->usb_state = USB_STATE_ADDRESS;
1492
1493         /* set the new address */
1494         tmp = ioread32(&u3d->vuc_regs->devaddrtiebrkr);
1495         tmp &= ~0x7F;
1496         tmp |= (u32)u3d->dev_addr;
1497         iowrite32(tmp, &u3d->vuc_regs->devaddrtiebrkr);
1498
1499         return;
1500 err:
1501         mv_u3d_ep0_stall(u3d);
1502 }
1503
1504 static int mv_u3d_is_set_configuration(struct usb_ctrlrequest *setup)
1505 {
1506         if ((setup->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD)
1507                 if (setup->bRequest == USB_REQ_SET_CONFIGURATION)
1508                         return 1;
1509
1510         return 0;
1511 }
1512
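/*
 * mv_u3d_handle_setup_packet() - dispatch a SETUP packet.
 * SET_ADDRESS is handled locally; everything else (including
 * GET_STATUS and the feature requests) is delegated to the gadget
 * driver's setup() callback with u3d->lock temporarily released.  The
 * ep0 direction and state are updated depending on whether a DATA
 * stage is expected.
 */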
1513 static void mv_u3d_handle_setup_packet(struct mv_u3d *u3d, u8 ep_num,
1514         struct usb_ctrlrequest *setup)
1515         __releases(&u3d->lock)
1516         __acquires(&u3d->lock)
1517 {
1518         bool delegate = false;
1519
1520         mv_u3d_nuke(&u3d->eps[ep_num * 2 + MV_U3D_EP_DIR_IN], -ESHUTDOWN);
1521
1522         dev_dbg(u3d->dev, "SETUP %02x.%02x v%04x i%04x l%04x\n",
1523                         setup->bRequestType, setup->bRequest,
1524                         setup->wValue, setup->wIndex, setup->wLength);
1525
1526                 /* We process some standard setup requests here */
1527         if ((setup->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD) {
1528                 switch (setup->bRequest) {
1529                 case USB_REQ_GET_STATUS:
1530                         delegate = true;
1531                         break;
1532
1533                 case USB_REQ_SET_ADDRESS:
1534                         mv_u3d_ch9setaddress(u3d, setup);
1535                         break;
1536
1537                 case USB_REQ_CLEAR_FEATURE:
1538                         delegate = true;
1539                         break;
1540
1541                 case USB_REQ_SET_FEATURE:
1542                         delegate = true;
1543                         break;
1544
1545                 default:
1546                         delegate = true;
1547                 }
1548         } else
1549                 delegate = true;
1550
1551         /* delegate the request to the gadget driver */
1552         if (delegate) {
1553                 /* USB requests handled by gadget */
1554                 if (setup->wLength) {
1555                         /* DATA phase from gadget, STATUS phase from u3d */
1556                         u3d->ep0_dir = (setup->bRequestType & USB_DIR_IN)
1557                                         ? MV_U3D_EP_DIR_IN : MV_U3D_EP_DIR_OUT;
1558                         spin_unlock(&u3d->lock);
1559                         if (u3d->driver->setup(&u3d->gadget,
1560                                 &u3d->local_setup_buff) < 0) {
1561                                 dev_err(u3d->dev, "setup error!\n");
1562                                 mv_u3d_ep0_stall(u3d);
1563                         }
1564                         spin_lock(&u3d->lock);
1565                 } else {
1566                         /* no DATA phase, STATUS phase from gadget */
1567                         u3d->ep0_dir = MV_U3D_EP_DIR_IN;
1568                         u3d->ep0_state = MV_U3D_STATUS_STAGE;
1569                         spin_unlock(&u3d->lock);
1570                         if (u3d->driver->setup(&u3d->gadget,
1571                                 &u3d->local_setup_buff) < 0)
1572                                 mv_u3d_ep0_stall(u3d);
1573                         spin_lock(&u3d->lock);
1574                 }
1575
1576                 if (mv_u3d_is_set_configuration(setup)) {
1577                         dev_dbg(u3d->dev, "u3d configured\n");
1578                         u3d->usb_state = USB_STATE_CONFIGURED;
1579                 }
1580         }
1581 }
1582
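/* Copy the 8-byte setup packet from the endpoint context into buffer_ptr. */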
1583 static void mv_u3d_get_setup_data(struct mv_u3d *u3d, u8 ep_num, u8 *buffer_ptr)
1584 {
1585         struct mv_u3d_ep_context *epcontext;
1586
1587         epcontext = &u3d->ep_context[ep_num * 2 + MV_U3D_EP_DIR_IN];
1588
1589         /* Copy the setup packet to local buffer */
1590         memcpy(buffer_ptr, (u8 *) &epcontext->setup_buffer, 8);
1591 }
1592
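/*
 * Handle setup interrupts: for every endpoint flagged in the
 * setuplock register, fetch the latched setup packet and pass it to
 * mv_u3d_handle_setup_packet(), then write the handled bits back to
 * setuplock.
 */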
1593 static void mv_u3d_irq_process_setup(struct mv_u3d *u3d)
1594 {
1595         u32 tmp, i;
1596         /* Process every endpoint with a pending setup packet */
1597         tmp = ioread32(&u3d->vuc_regs->setuplock);
1598         if (tmp) {
1599                 for (i = 0; i < u3d->max_eps; i++) {
1600                         if (tmp & (1 << i)) {
1601                                 mv_u3d_get_setup_data(u3d, i,
1602                                         (u8 *)(&u3d->local_setup_buff));
1603                                 mv_u3d_handle_setup_packet(u3d, i,
1604                                         &u3d->local_setup_buff);
1605                         }
1606                 }
1607         }
1608
1609         iowrite32(tmp, &u3d->vuc_regs->setuplock);
1610 }
1611
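/*
 * Transfer-completion handler: read and acknowledge the endcomplete
 * register, then for each completed endpoint/direction pair pop the
 * head of its req_list, retire finished requests on the queue through
 * mv_u3d_done() and restart the queue with mv_u3d_start_queue().
 * Bits [15:0] of endcomplete cover direction 0 and bits [31:16]
 * direction 1, matching the i = ep_num * 2 + direction indexing of
 * u3d->eps[].
 */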
1612 static void mv_u3d_irq_process_tr_complete(struct mv_u3d *u3d)
1613 {
1614         u32 tmp, bit_pos;
1615         int i, ep_num = 0, direction = 0;
1616         struct mv_u3d_ep        *curr_ep;
1617         struct mv_u3d_req *curr_req, *temp_req;
1618         int status;
1619
1620         tmp = ioread32(&u3d->vuc_regs->endcomplete);
1621
1622         dev_dbg(u3d->dev, "tr_complete: ep: 0x%x\n", tmp);
1623         if (!tmp)
1624                 return;
1625         iowrite32(tmp, &u3d->vuc_regs->endcomplete);
1626
1627         for (i = 0; i < u3d->max_eps * 2; i++) {
1628                 ep_num = i >> 1;
1629                 direction = i % 2;
1630
1631                 bit_pos = 1 << (ep_num + 16 * direction);
1632
1633                 if (!(bit_pos & tmp))
1634                         continue;
1635
1636                 if (i == 0)
1637                         curr_ep = &u3d->eps[1];
1638                 else
1639                         curr_ep = &u3d->eps[i];
1640
1641                 /* remove the completed req from the ep request list */
1642                 dev_dbg(u3d->dev, "tr comp: check req_list\n");
1643                 spin_lock(&curr_ep->req_lock);
1644                 if (!list_empty(&curr_ep->req_list)) {
1645                         struct mv_u3d_req *req;
1646                         req = list_entry(curr_ep->req_list.next,
1647                                                 struct mv_u3d_req, list);
1648                         list_del_init(&req->list);
1649                         curr_ep->processing = 0;
1650                 }
1651                 spin_unlock(&curr_ep->req_lock);
1652
1653                 /* process the req queue until an incomplete request is found */
1654                 list_for_each_entry_safe(curr_req, temp_req,
1655                         &curr_ep->queue, queue) {
1656                         status = mv_u3d_process_ep_req(u3d, i, curr_req);
1657                         if (status)
1658                                 break;
1659                         /* write back status to req */
1660                         curr_req->req.status = status;
1661
1662                         /* ep0 request completion */
1663                         if (ep_num == 0) {
1664                                 mv_u3d_done(curr_ep, curr_req, 0);
1665                                 break;
1666                         } else {
1667                                 mv_u3d_done(curr_ep, curr_req, status);
1668                         }
1669                 }
1670
1671                 dev_dbg(u3d->dev, "call mv_u3d_start_queue from ep complete\n");
1672                 mv_u3d_start_queue(curr_ep);
1673         }
1674 }
1675
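/*
 * Top-level interrupt handler: read intrcause, mask it with
 * intrenable and fan the remaining bits out to the vbus, underrun,
 * descriptor-error, link-change, transfer-complete and setup
 * sub-handlers while holding u3d->lock. Returns IRQ_NONE if no
 * enabled cause bit is set.
 */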
1676 static irqreturn_t mv_u3d_irq(int irq, void *dev)
1677 {
1678         struct mv_u3d *u3d = (struct mv_u3d *)dev;
1679         u32 status, intr;
1680         u32 bridgesetting;
1681         u32 trbunderrun;
1682
1683         spin_lock(&u3d->lock);
1684
1685         status = ioread32(&u3d->vuc_regs->intrcause);
1686         intr = ioread32(&u3d->vuc_regs->intrenable);
1687         status &= intr;
1688
1689         if (status == 0) {
1690                 spin_unlock(&u3d->lock);
1691                 dev_err(u3d->dev, "irq error!\n");
1692                 return IRQ_NONE;
1693         }
1694
1695         if (status & MV_U3D_USBINT_VBUS_VALID) {
1696                 bridgesetting = ioread32(&u3d->vuc_regs->bridgesetting);
1697                 if (bridgesetting & MV_U3D_BRIDGE_SETTING_VBUS_VALID) {
1698                         /* write one to the vbus valid bit to clear it */
1699                         bridgesetting = MV_U3D_BRIDGE_SETTING_VBUS_VALID;
1700                         iowrite32(bridgesetting, &u3d->vuc_regs->bridgesetting);
1701                         dev_dbg(u3d->dev, "vbus valid\n");
1702
1703                         u3d->usb_state = USB_STATE_POWERED;
1704                         u3d->vbus_valid_detect = 0;
1705                         /* If external vbus detection is not supported,
1706                          * handle it here.
1707                          */
1708                         if (!u3d->vbus) {
1709                                 spin_unlock(&u3d->lock);
1710                                 mv_u3d_vbus_session(&u3d->gadget, 1);
1711                                 spin_lock(&u3d->lock);
1712                         }
1713                 } else
1714                         dev_err(u3d->dev, "vbus bit is not set\n");
1715         }
1716
1717         /* RX data is already in the 16KB FIFO. */
1718         if (status & MV_U3D_USBINT_UNDER_RUN) {
1719                 trbunderrun = ioread32(&u3d->vuc_regs->trbunderrun);
1720                 dev_err(u3d->dev, "under run, ep%d\n", trbunderrun);
1721                 iowrite32(trbunderrun, &u3d->vuc_regs->trbunderrun);
1722                 mv_u3d_irq_process_error(u3d);
1723         }
1724
1725         if (status & (MV_U3D_USBINT_RXDESC_ERR | MV_U3D_USBINT_TXDESC_ERR)) {
1726                 /* write one to clear */
1727                 iowrite32(status & (MV_U3D_USBINT_RXDESC_ERR
1728                         | MV_U3D_USBINT_TXDESC_ERR),
1729                         &u3d->vuc_regs->intrcause);
1730                 dev_err(u3d->dev, "desc err 0x%x\n", status);
1731                 mv_u3d_irq_process_error(u3d);
1732         }
1733
1734         if (status & MV_U3D_USBINT_LINK_CHG)
1735                 mv_u3d_irq_process_link_change(u3d);
1736
1737         if (status & MV_U3D_USBINT_TX_COMPLETE)
1738                 mv_u3d_irq_process_tr_complete(u3d);
1739
1740         if (status & MV_U3D_USBINT_RX_COMPLETE)
1741                 mv_u3d_irq_process_tr_complete(u3d);
1742
1743         if (status & MV_U3D_USBINT_SETUP)
1744                 mv_u3d_irq_process_setup(u3d);
1745
1746         spin_unlock(&u3d->lock);
1747         return IRQ_HANDLED;
1748 }
1749
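/*
 * Platform driver remove: unregister the gadget and release the
 * resources allocated in probe (TRB pool, ep context memory, eps
 * array, IRQ, register mapping, status request and clock).
 */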
1750 static int mv_u3d_remove(struct platform_device *dev)
1751 {
1752         struct mv_u3d *u3d = platform_get_drvdata(dev);
1753
1754         BUG_ON(u3d == NULL);
1755
1756         usb_del_gadget_udc(&u3d->gadget);
1757
1758         /* free memory allocated in probe */
1759         dma_pool_destroy(u3d->trb_pool);
1760
1761         if (u3d->ep_context)
1762                 dma_free_coherent(&dev->dev, u3d->ep_context_size,
1763                         u3d->ep_context, u3d->ep_context_dma);
1764
1765         kfree(u3d->eps);
1766
1767         if (u3d->irq)
1768                 free_irq(u3d->irq, u3d);
1769
1770         if (u3d->cap_regs)
1771                 iounmap(u3d->cap_regs);
1772         u3d->cap_regs = NULL;
1773
1774         kfree(u3d->status_req);
1775
1776         clk_put(u3d->clk);
1777
1778         kfree(u3d);
1779
1780         return 0;
1781 }
1782
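/*
 * Platform driver probe: map the capability registers, briefly enable
 * the clock and PHY to quiesce a controller possibly left running by
 * the bootloader, allocate the ep context area, TRB pool, endpoint
 * array and ep0 status request, initialize the gadget structure and
 * IRQ, and finally register the UDC.
 */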
1783 static int mv_u3d_probe(struct platform_device *dev)
1784 {
1785         struct mv_u3d *u3d = NULL;
1786         struct mv_usb_platform_data *pdata = dev_get_platdata(&dev->dev);
1787         int retval = 0;
1788         struct resource *r;
1789         size_t size;
1790
1791         if (!dev_get_platdata(&dev->dev)) {
1792                 dev_err(&dev->dev, "missing platform_data\n");
1793                 retval = -ENODEV;
1794                 goto err_pdata;
1795         }
1796
1797         u3d = kzalloc(sizeof(*u3d), GFP_KERNEL);
1798         if (!u3d) {
1799                 retval = -ENOMEM;
1800                 goto err_alloc_private;
1801         }
1802
1803         spin_lock_init(&u3d->lock);
1804
1805         platform_set_drvdata(dev, u3d);
1806
1807         u3d->dev = &dev->dev;
1808         u3d->vbus = pdata->vbus;
1809
1810         u3d->clk = clk_get(&dev->dev, NULL);
1811         if (IS_ERR(u3d->clk)) {
1812                 retval = PTR_ERR(u3d->clk);
1813                 goto err_get_clk;
1814         }
1815
1816         r = platform_get_resource_byname(dev, IORESOURCE_MEM, "capregs");
1817         if (!r) {
1818                 dev_err(&dev->dev, "no I/O memory resource defined\n");
1819                 retval = -ENODEV;
1820                 goto err_get_cap_regs;
1821         }
1822
1823         u3d->cap_regs = (struct mv_u3d_cap_regs __iomem *)
1824                 ioremap(r->start, resource_size(r));
1825         if (!u3d->cap_regs) {
1826                 dev_err(&dev->dev, "failed to map I/O memory\n");
1827                 retval = -EBUSY;
1828                 goto err_map_cap_regs;
1829         } else {
1830                 dev_dbg(&dev->dev, "cap_regs address: 0x%lx/0x%lx\n",
1831                         (unsigned long) r->start,
1832                         (unsigned long) u3d->cap_regs);
1833         }
1834
1835         /* we will access controller registers, so enable the u3d controller clock */
1836         retval = clk_enable(u3d->clk);
1837         if (retval) {
1838                 dev_err(&dev->dev, "clk_enable error %d\n", retval);
1839                 goto err_u3d_enable;
1840         }
1841
1842         if (pdata->phy_init) {
1843                 retval = pdata->phy_init(u3d->phy_regs);
1844                 if (retval) {
1845                         dev_err(&dev->dev, "init phy error %d\n", retval);
1846                         clk_disable(u3d->clk);
1847                         goto err_phy_init;
1848                 }
1849         }
1850
1851         u3d->op_regs = (struct mv_u3d_op_regs __iomem *)(u3d->cap_regs
1852                 + MV_U3D_USB3_OP_REGS_OFFSET);
1853
1854         u3d->vuc_regs = (struct mv_u3d_vuc_regs __iomem *)(u3d->cap_regs
1855                 + ioread32(&u3d->cap_regs->vuoff));
1856
1857         u3d->max_eps = 16;
1858
1859         /*
1860          * Some platforms use USB to download an image and may not disconnect
1861          * the USB gadget before loading the kernel, so stop u3d first here.
1862          */
1863         mv_u3d_controller_stop(u3d);
1864         iowrite32(0xFFFFFFFF, &u3d->vuc_regs->intrcause);
1865
1866         if (pdata->phy_deinit)
1867                 pdata->phy_deinit(u3d->phy_regs);
1868         clk_disable(u3d->clk);
1869
1870         size = u3d->max_eps * sizeof(struct mv_u3d_ep_context) * 2;
1871         size = (size + MV_U3D_EP_CONTEXT_ALIGNMENT - 1)
1872                 & ~(MV_U3D_EP_CONTEXT_ALIGNMENT - 1);
1873         u3d->ep_context = dma_alloc_coherent(&dev->dev, size,
1874                                         &u3d->ep_context_dma, GFP_KERNEL);
1875         if (!u3d->ep_context) {
1876                 dev_err(&dev->dev, "allocate ep context memory failed\n");
1877                 retval = -ENOMEM;
1878                 goto err_alloc_ep_context;
1879         }
1880         u3d->ep_context_size = size;
1881
1882         /* create TRB dma_pool resource */
1883         u3d->trb_pool = dma_pool_create("u3d_trb",
1884                         &dev->dev,
1885                         sizeof(struct mv_u3d_trb_hw),
1886                         MV_U3D_TRB_ALIGNMENT,
1887                         MV_U3D_DMA_BOUNDARY);
1888
1889         if (!u3d->trb_pool) {
1890                 retval = -ENOMEM;
1891                 goto err_alloc_trb_pool;
1892         }
1893
1894         size = u3d->max_eps * sizeof(struct mv_u3d_ep) * 2;
1895         u3d->eps = kzalloc(size, GFP_KERNEL);
1896         if (!u3d->eps) {
1897                 retval = -ENOMEM;
1898                 goto err_alloc_eps;
1899         }
1900
1901         /* initialize ep0 status request structure */
1902         u3d->status_req = kzalloc(sizeof(struct mv_u3d_req) + 8, GFP_KERNEL);
1903         if (!u3d->status_req) {
1904                 retval = -ENOMEM;
1905                 goto err_alloc_status_req;
1906         }
1907         INIT_LIST_HEAD(&u3d->status_req->queue);
1908
1909         /* the extra 8 bytes allocated above give req.buf a valid address */
1910         u3d->status_req->req.buf = (char *)u3d->status_req
1911                                         + sizeof(struct mv_u3d_req);
1912         u3d->status_req->req.dma = virt_to_phys(u3d->status_req->req.buf);
1913
1914         u3d->resume_state = USB_STATE_NOTATTACHED;
1915         u3d->usb_state = USB_STATE_ATTACHED;
1916         u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
1917         u3d->remote_wakeup = 0;
1918
1919         r = platform_get_resource(dev, IORESOURCE_IRQ, 0);
1920         if (!r) {
1921                 dev_err(&dev->dev, "no IRQ resource defined\n");
1922                 retval = -ENODEV;
1923                 goto err_get_irq;
1924         }
1925         u3d->irq = r->start;
1926
1927         /* initialize gadget structure */
1928         u3d->gadget.ops = &mv_u3d_ops;  /* usb_gadget_ops */
1929         u3d->gadget.ep0 = &u3d->eps[1].ep;      /* gadget ep0 */
1930         INIT_LIST_HEAD(&u3d->gadget.ep_list);   /* ep_list */
1931         u3d->gadget.speed = USB_SPEED_UNKNOWN;  /* speed */
1932
1933         /* the "gadget" abstracts/virtualizes the controller */
1934         u3d->gadget.name = driver_name;         /* gadget name */
1935
1936         mv_u3d_eps_init(u3d);
1937
1938         if (request_irq(u3d->irq, mv_u3d_irq,
1939                 IRQF_SHARED, driver_name, u3d)) {
1940                 dev_err(&dev->dev, "Request irq %d for u3d failed\n",
1941                         u3d->irq);
1942                 u3d->irq = 0;
1943                 retval = -ENODEV;
1944                 goto err_request_irq;
1945         }
1946
1947         /* external vbus detection */
1948         if (u3d->vbus) {
1949                 u3d->clock_gating = 1;
1950                 dev_info(&dev->dev, "external vbus detection\n");
1951         }
1952
1953         if (!u3d->clock_gating)
1954                 u3d->vbus_active = 1;
1955
1956         /* enable usb3 controller vbus detection */
1957         u3d->vbus_valid_detect = 1;
1958
1959         retval = usb_add_gadget_udc(&dev->dev, &u3d->gadget);
1960         if (retval)
1961                 goto err_unregister;
1962
1963         dev_dbg(&dev->dev, "successfully probed usb3 device %s clock gating\n",
1964                 u3d->clock_gating ? "with" : "without");
1965
1966         return 0;
1967
1968 err_unregister:
1969         free_irq(u3d->irq, u3d);
1970 err_get_irq:
1971 err_request_irq:
1972         kfree(u3d->status_req);
1973 err_alloc_status_req:
1974         kfree(u3d->eps);
1975 err_alloc_eps:
1976         dma_pool_destroy(u3d->trb_pool);
1977 err_alloc_trb_pool:
1978         dma_free_coherent(&dev->dev, u3d->ep_context_size,
1979                 u3d->ep_context, u3d->ep_context_dma);
1980 err_alloc_ep_context:
1981 err_phy_init:
1982 err_u3d_enable:
1983         iounmap(u3d->cap_regs);
1984 err_map_cap_regs:
1985 err_get_cap_regs:
1986         clk_put(u3d->clk);
1987 err_get_clk:
1988         kfree(u3d);
1989 err_alloc_private:
1990 err_pdata:
1991         return retval;
1992 }
1993
1994 #ifdef CONFIG_PM_SLEEP
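/*
 * System sleep: when clock gating is not used, stop the controller,
 * tear down any in-flight transfers and call mv_u3d_disable().
 */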
1995 static int mv_u3d_suspend(struct device *dev)
1996 {
1997         struct mv_u3d *u3d = dev_get_drvdata(dev);
1998
1999         /*
2000          * USB can only suspend when the cable is unplugged, so do not
2001          * care about the clock_gating == 1 case; it is handled by the
2002          * vbus session.
2003          */
2004         if (!u3d->clock_gating) {
2005                 mv_u3d_controller_stop(u3d);
2006
2007                 spin_lock_irq(&u3d->lock);
2008                 /* stop all usb activities */
2009                 mv_u3d_stop_activity(u3d, u3d->driver);
2010                 spin_unlock_irq(&u3d->lock);
2011
2012                 mv_u3d_disable(u3d);
2013         }
2014
2015         return 0;
2016 }
2017
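/*
 * System resume: when clock gating is not used, re-enable the
 * controller via mv_u3d_enable() and, if a gadget driver is bound and
 * soft-connected, reset and restart the controller and ep0.
 */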
2018 static int mv_u3d_resume(struct device *dev)
2019 {
2020         struct mv_u3d *u3d = dev_get_drvdata(dev);
2021         int retval;
2022
2023         if (!u3d->clock_gating) {
2024                 retval = mv_u3d_enable(u3d);
2025                 if (retval)
2026                         return retval;
2027
2028                 if (u3d->driver && u3d->softconnect) {
2029                         mv_u3d_controller_reset(u3d);
2030                         mv_u3d_ep0_reset(u3d);
2031                         mv_u3d_controller_start(u3d);
2032                 }
2033         }
2034
2035         return 0;
2036 }
2037 #endif
2038
2039 static SIMPLE_DEV_PM_OPS(mv_u3d_pm_ops, mv_u3d_suspend, mv_u3d_resume);
2040
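/* Shutdown: clear the RUN/STOP bit in usbcmd so the controller stops. */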
2041 static void mv_u3d_shutdown(struct platform_device *dev)
2042 {
2043         struct mv_u3d *u3d = platform_get_drvdata(dev);
2044         u32 tmp;
2045
2046         tmp = ioread32(&u3d->op_regs->usbcmd);
2047         tmp &= ~MV_U3D_CMD_RUN_STOP;
2048         iowrite32(tmp, &u3d->op_regs->usbcmd);
2049 }
2050
2051 static struct platform_driver mv_u3d_driver = {
2052         .probe          = mv_u3d_probe,
2053         .remove         = mv_u3d_remove,
2054         .shutdown       = mv_u3d_shutdown,
2055         .driver         = {
2056                 .name   = "mv-u3d",
2057                 .pm     = &mv_u3d_pm_ops,
2058         },
2059 };
2060
2061 module_platform_driver(mv_u3d_driver);
2062 MODULE_ALIAS("platform:mv-u3d");
2063 MODULE_DESCRIPTION(DRIVER_DESC);
2064 MODULE_AUTHOR("Yu Xu <yuxu@marvell.com>");
2065 MODULE_LICENSE("GPL");