Lines matching refs: ep

177 struct usb_ep ep; member
199 #define to_ast_ep(__ep) container_of(__ep, struct ast_udc_ep, ep)
212 struct ast_udc_ep ep[AST_UDC_NUM_ENDPOINTS]; member
249 (e)->ep.name, ##__VA_ARGS__)
251 #define EP_DBG(ep, fmt, ...) ((void)(ep)) argument
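
The to_ast_ep() macro above recovers the driver's per-endpoint state from the generic struct usb_ep embedded in it (the ep member shown at the top of the listing). A minimal, self-contained sketch of that container_of pattern; the struct layout and field names below are illustrative stand-ins, not the driver's real definitions:

#include <stddef.h>
#include <stdio.h>

/* Illustrative stand-ins for the embedded-struct pattern. */
struct usb_ep_like {
	const char *name;
};

struct ast_udc_ep_like {
	struct usb_ep_like ep;	/* embedded generic endpoint */
	int dir_in;		/* private driver state */
};

/* container_of(): recover the outer struct from a pointer to its member. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

#define to_ast_ep_like(__ep) container_of(__ep, struct ast_udc_ep_like, ep)

int main(void)
{
	struct ast_udc_ep_like my_ep = { .ep = { .name = "ep1" }, .dir_in = 1 };
	struct usb_ep_like *generic = &my_ep.ep;	/* what the gadget core sees */

	/* The driver converts back to its private structure. */
	struct ast_udc_ep_like *priv = to_ast_ep_like(generic);

	printf("%s dir_in=%d\n", priv->ep.name, priv->dir_in);
	return 0;
}
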
274 #define ast_ep_read(ep, reg) \ argument
275 readl((ep)->ep_reg + (reg))
276 #define ast_ep_write(ep, val, reg) \ argument
277 writel((val), (ep)->ep_reg + (reg))
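
ast_ep_read()/ast_ep_write() wrap readl()/writel() around each endpoint's register window, addressed as ep->ep_reg plus a register offset. A user-space sketch of the same accessor shape, with stand-in read/write helpers and a fake register block; the register name and offset here are invented for illustration:

#include <stdint.h>
#include <stdio.h>

/* User-space stand-ins for readl()/writel() over a fake register window. */
static uint32_t fake_regs[16];

static uint32_t my_readl(void *addr)                { return *(volatile uint32_t *)addr; }
static void     my_writel(uint32_t val, void *addr) { *(volatile uint32_t *)addr = val; }

struct ep_ctx { uint8_t *ep_reg; };	/* base of this endpoint's register window */

/* Same shape as the ast_ep_read()/ast_ep_write() macros in the listing. */
#define ep_read(ep, reg)       my_readl((ep)->ep_reg + (reg))
#define ep_write(ep, val, reg) my_writel((val), (ep)->ep_reg + (reg))

#define EP_CONFIG  0x00		/* offsets and bits are illustrative, not the hardware's */
#define EP_ENABLE  (1u << 0)

int main(void)
{
	struct ep_ctx ep = { .ep_reg = (uint8_t *)fake_regs };

	ep_write(&ep, ep_read(&ep, EP_CONFIG) | EP_ENABLE, EP_CONFIG);
	printf("EP_CONFIG = 0x%x\n", ep_read(&ep, EP_CONFIG));
	return 0;
}
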
281 static void ast_udc_done(struct ast_udc_ep *ep, struct ast_udc_request *req, in ast_udc_done() argument
284 struct ast_udc_dev *udc = ep->udc; in ast_udc_done()
286 EP_DBG(ep, "req @%p, len (%d/%d), buf:0x%x, dir:0x%x\n", in ast_udc_done()
288 (u32)req->req.buf, ep->dir_in); in ast_udc_done()
298 EP_DBG(ep, "done req:%p, status:%d\n", req, status); in ast_udc_done()
301 usb_gadget_giveback_request(&ep->ep, &req->req); in ast_udc_done()
305 static void ast_udc_nuke(struct ast_udc_ep *ep, int status) in ast_udc_nuke() argument
309 while (!list_empty(&ep->queue)) { in ast_udc_nuke()
312 req = list_entry(ep->queue.next, struct ast_udc_request, in ast_udc_nuke()
314 ast_udc_done(ep, req, status); in ast_udc_nuke()
319 EP_DBG(ep, "Nuked %d request(s)\n", count); in ast_udc_nuke()
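
The fragments above are the completion path: ast_udc_done() unlinks one request from the endpoint queue, records the status and hands it back to the gadget layer, and ast_udc_nuke() drains every queued request with an error status. A self-contained sketch of that drain pattern, using a plain singly linked queue in place of the kernel's list_head and usb_gadget_giveback_request():

#include <errno.h>
#include <stdio.h>

/* Simplified request with a completion callback, standing in for usb_request. */
struct request {
	int status;
	void (*complete)(struct request *req);
	struct request *next;
};

struct endpoint {
	struct request *queue;	/* head of pending requests */
};

/* "done": unlink one request, record status, call its completion. */
static void ep_done(struct endpoint *ep, struct request *req, int status)
{
	ep->queue = req->next;		/* this sketch only ever completes the head */
	req->status = status;
	req->complete(req);
}

/* "nuke": complete everything still queued with an error status. */
static int ep_nuke(struct endpoint *ep, int status)
{
	int count = 0;

	while (ep->queue) {
		ep_done(ep, ep->queue, status);
		count++;
	}
	return count;
}

static void on_complete(struct request *req)
{
	printf("request %p completed, status %d\n", (void *)req, req->status);
}

int main(void)
{
	struct request a = { .complete = on_complete };
	struct request b = { .complete = on_complete };
	struct endpoint ep = { .queue = &a };

	a.next = &b;
	printf("nuked %d request(s)\n", ep_nuke(&ep, -ESHUTDOWN));
	return 0;
}
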
331 struct ast_udc_ep *ep; in ast_udc_stop_activity() local
335 ep = &udc->ep[i]; in ast_udc_stop_activity()
336 ep->stopped = 1; in ast_udc_stop_activity()
337 ast_udc_nuke(ep, -ESHUTDOWN); in ast_udc_stop_activity()
345 struct ast_udc_ep *ep = to_ast_ep(_ep); in ast_udc_ep_enable() local
346 struct ast_udc_dev *udc = ep->udc; in ast_udc_ep_enable()
353 if (!_ep || !ep || !desc || desc->bDescriptorType != USB_DT_ENDPOINT || in ast_udc_ep_enable()
354 maxpacket == 0 || maxpacket > ep->ep.maxpacket) { in ast_udc_ep_enable()
355 EP_DBG(ep, "Failed, invalid EP enable param\n"); in ast_udc_ep_enable()
360 EP_DBG(ep, "bogus device state\n"); in ast_udc_ep_enable()
364 EP_DBG(ep, "maxpacket:0x%x\n", maxpacket); in ast_udc_ep_enable()
368 ep->desc = desc; in ast_udc_ep_enable()
369 ep->stopped = 0; in ast_udc_ep_enable()
370 ep->ep.maxpacket = maxpacket; in ast_udc_ep_enable()
371 ep->chunk_max = AST_EP_DMA_DESC_MAX_LEN; in ast_udc_ep_enable()
380 ep->dir_in = dir_in; in ast_udc_ep_enable()
381 if (!ep->dir_in) in ast_udc_ep_enable()
384 EP_DBG(ep, "type %d, dir_in %d\n", type, dir_in); in ast_udc_ep_enable()
399 ep->desc_mode = udc->desc_mode && ep->descs_dma && ep->dir_in; in ast_udc_ep_enable()
400 if (ep->desc_mode) { in ast_udc_ep_enable()
401 ast_ep_write(ep, EP_DMA_CTRL_RESET, AST_UDC_EP_DMA_CTRL); in ast_udc_ep_enable()
402 ast_ep_write(ep, 0, AST_UDC_EP_DMA_STS); in ast_udc_ep_enable()
403 ast_ep_write(ep, ep->descs_dma, AST_UDC_EP_DMA_BUFF); in ast_udc_ep_enable()
406 ast_ep_write(ep, EP_DMA_CTRL_IN_LONG_MODE | EP_DMA_DESC_MODE, in ast_udc_ep_enable()
409 ep->descs_wptr = 0; in ast_udc_ep_enable()
412 ast_ep_write(ep, EP_DMA_CTRL_RESET, AST_UDC_EP_DMA_CTRL); in ast_udc_ep_enable()
413 ast_ep_write(ep, EP_DMA_SINGLE_STAGE, AST_UDC_EP_DMA_CTRL); in ast_udc_ep_enable()
414 ast_ep_write(ep, 0, AST_UDC_EP_DMA_STS); in ast_udc_ep_enable()
421 ast_ep_write(ep, ep_conf | EP_ENABLE, AST_UDC_EP_CONFIG); in ast_udc_ep_enable()
423 EP_DBG(ep, "ep_config: 0x%x\n", ast_ep_read(ep, AST_UDC_EP_CONFIG)); in ast_udc_ep_enable()
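
ast_udc_ep_enable() first validates the endpoint descriptor (correct bDescriptorType, non-zero wMaxPacketSize within the endpoint's limit), then records direction and maxpacket, selects descriptor-mode or single-stage DMA, and finally sets EP_ENABLE in the config register. A hedged sketch of just the validation step, built on the installed UAPI header <linux/usb/ch9.h>; it mirrors the checks visible above but is not the driver's code, and the 1024-byte limit is only an example:

#include <errno.h>
#include <stdio.h>
#include <linux/usb/ch9.h>

/*
 * Descriptor validation in the spirit of the checks shown above:
 * reject a missing or non-endpoint descriptor and an out-of-range
 * wMaxPacketSize.  max_supported stands in for ep->ep.maxpacket.
 */
static int ep_enable_check(const struct usb_endpoint_descriptor *desc,
			   unsigned int max_supported)
{
	unsigned int maxpacket;

	if (!desc || desc->bDescriptorType != USB_DT_ENDPOINT)
		return -EINVAL;

	maxpacket = usb_endpoint_maxp(desc);
	if (maxpacket == 0 || maxpacket > max_supported)
		return -EINVAL;

	printf("ep%d %s, type %d, maxpacket %u\n",
	       usb_endpoint_num(desc),
	       usb_endpoint_dir_in(desc) ? "IN" : "OUT",
	       usb_endpoint_type(desc), maxpacket);
	return 0;
}

int main(void)
{
	struct usb_endpoint_descriptor bulk_in = {
		.bLength = USB_DT_ENDPOINT_SIZE,
		.bDescriptorType = USB_DT_ENDPOINT,
		.bEndpointAddress = USB_DIR_IN | 1,
		.bmAttributes = USB_ENDPOINT_XFER_BULK,
		.wMaxPacketSize = 512,	/* little-endian on the wire; plain assignment is fine for an LE-host demo */
	};

	return ep_enable_check(&bulk_in, 1024) ? 1 : 0;
}
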
432 struct ast_udc_ep *ep = to_ast_ep(_ep); in ast_udc_ep_disable() local
433 struct ast_udc_dev *udc = ep->udc; in ast_udc_ep_disable()
438 ep->ep.desc = NULL; in ast_udc_ep_disable()
439 ep->stopped = 1; in ast_udc_ep_disable()
441 ast_udc_nuke(ep, -ESHUTDOWN); in ast_udc_ep_disable()
442 ast_ep_write(ep, 0, AST_UDC_EP_CONFIG); in ast_udc_ep_disable()
452 struct ast_udc_ep *ep = to_ast_ep(_ep); in ast_udc_ep_alloc_request() local
457 EP_DBG(ep, "request allocation failed\n"); in ast_udc_ep_alloc_request()
474 static int ast_dma_descriptor_setup(struct ast_udc_ep *ep, u32 dma_buf, in ast_dma_descriptor_setup() argument
477 struct ast_udc_dev *udc = ep->udc; in ast_dma_descriptor_setup()
483 if (!ep->descs) { in ast_dma_descriptor_setup()
485 ep->ep.name); in ast_dma_descriptor_setup()
492 EP_DBG(ep, "req @%p, %s:%d, %s:0x%x, %s:0x%x\n", req, in ast_dma_descriptor_setup()
493 "wptr", ep->descs_wptr, "dma_buf", dma_buf, in ast_dma_descriptor_setup()
499 ep->descs[ep->descs_wptr].des_0 = dma_buf + offset; in ast_dma_descriptor_setup()
501 if (chunk > ep->chunk_max) { in ast_dma_descriptor_setup()
502 ep->descs[ep->descs_wptr].des_1 = ep->chunk_max; in ast_dma_descriptor_setup()
504 ep->descs[ep->descs_wptr].des_1 = chunk; in ast_dma_descriptor_setup()
508 chunk -= ep->chunk_max; in ast_dma_descriptor_setup()
510 EP_DBG(ep, "descs[%d]: 0x%x 0x%x\n", in ast_dma_descriptor_setup()
511 ep->descs_wptr, in ast_dma_descriptor_setup()
512 ep->descs[ep->descs_wptr].des_0, in ast_dma_descriptor_setup()
513 ep->descs[ep->descs_wptr].des_1); in ast_dma_descriptor_setup()
516 req->saved_dma_wptr = ep->descs_wptr; in ast_dma_descriptor_setup()
518 ep->descs_wptr++; in ast_dma_descriptor_setup()
521 if (ep->descs_wptr >= AST_UDC_DESCS_COUNT) in ast_dma_descriptor_setup()
522 ep->descs_wptr = 0; in ast_dma_descriptor_setup()
524 offset = ep->chunk_max * count; in ast_dma_descriptor_setup()
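
ast_dma_descriptor_setup() splits one mapped buffer into chunks of at most ep->chunk_max bytes, writing each chunk's address and length into a descriptor ring and wrapping the write pointer at AST_UDC_DESCS_COUNT. A self-contained sketch of that chunking and wrap arithmetic; the descriptor fields and ring size are illustrative, not the controller's layout:

#include <stdint.h>
#include <stdio.h>

#define DESCS_COUNT	8	/* ring size; illustrative, not the hardware's */

struct dma_desc {
	uint32_t addr;	/* stands in for des_0: chunk bus address */
	uint32_t len;	/* stands in for des_1: chunk length */
};

struct ep_ring {
	struct dma_desc descs[DESCS_COUNT];
	unsigned int wptr;	/* write pointer, wraps at DESCS_COUNT */
	unsigned int chunk_max;	/* max bytes one descriptor may carry */
};

/* Queue "len" bytes starting at bus address "dma_buf" as one or more chunks. */
static void ring_setup(struct ep_ring *ep, uint32_t dma_buf, uint32_t len)
{
	uint32_t offset = 0;
	uint32_t chunk;

	while (len) {
		chunk = len > ep->chunk_max ? ep->chunk_max : len;

		ep->descs[ep->wptr].addr = dma_buf + offset;
		ep->descs[ep->wptr].len = chunk;
		printf("descs[%u]: 0x%x %u\n", ep->wptr,
		       ep->descs[ep->wptr].addr, ep->descs[ep->wptr].len);

		if (++ep->wptr >= DESCS_COUNT)	/* wrap the ring */
			ep->wptr = 0;

		offset += chunk;
		len -= chunk;
	}
}

int main(void)
{
	struct ep_ring ep = { .chunk_max = 4096 };

	ring_setup(&ep, 0x80000000u, 10000);	/* split as 4096 + 4096 + 1808 */
	return 0;
}
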
530 static void ast_udc_epn_kick(struct ast_udc_ep *ep, struct ast_udc_request *req) in ast_udc_epn_kick() argument
536 tx_len = last > ep->ep.maxpacket ? ep->ep.maxpacket : last; in ast_udc_epn_kick()
538 EP_DBG(ep, "kick req @%p, len:%d, dir:%d\n", in ast_udc_epn_kick()
539 req, tx_len, ep->dir_in); in ast_udc_epn_kick()
541 ast_ep_write(ep, req->req.dma + req->req.actual, AST_UDC_EP_DMA_BUFF); in ast_udc_epn_kick()
544 ast_ep_write(ep, EP_DMA_SET_TX_SIZE(tx_len), AST_UDC_EP_DMA_STS); in ast_udc_epn_kick()
545 ast_ep_write(ep, EP_DMA_SET_TX_SIZE(tx_len) | EP_DMA_SINGLE_KICK, in ast_udc_epn_kick()
549 static void ast_udc_epn_kick_desc(struct ast_udc_ep *ep, in ast_udc_epn_kick_desc() argument
561 EP_DBG(ep, "kick req @%p, %s:%d, %s:0x%x, %s:0x%x (%d/%d), %s:0x%x\n", in ast_udc_epn_kick_desc()
562 req, "tx_len", tx_len, "dir_in", ep->dir_in, in ast_udc_epn_kick_desc()
567 if (!ast_dma_descriptor_setup(ep, req->req.dma + req->req.actual, in ast_udc_epn_kick_desc()
574 ast_ep_write(ep, ep->descs_wptr, AST_UDC_EP_DMA_STS); in ast_udc_epn_kick_desc()
576 EP_DBG(ep, "descs_wptr:%d, dstat:0x%x, dctrl:0x%x\n", in ast_udc_epn_kick_desc()
577 ep->descs_wptr, in ast_udc_epn_kick_desc()
578 ast_ep_read(ep, AST_UDC_EP_DMA_STS), in ast_udc_epn_kick_desc()
579 ast_ep_read(ep, AST_UDC_EP_DMA_CTRL)); in ast_udc_epn_kick_desc()
582 static void ast_udc_ep0_queue(struct ast_udc_ep *ep, in ast_udc_ep0_queue() argument
585 struct ast_udc_dev *udc = ep->udc; in ast_udc_ep0_queue()
590 tx_len = last > ep->ep.maxpacket ? ep->ep.maxpacket : last; in ast_udc_ep0_queue()
595 if (ep->dir_in) { in ast_udc_ep0_queue()
602 "dir_in", ep->dir_in); in ast_udc_ep0_queue()
615 "dir_in", ep->dir_in); in ast_udc_ep0_queue()
620 ep->dir_in = 0x1; in ast_udc_ep0_queue()
630 struct ast_udc_ep *ep = to_ast_ep(_ep); in ast_udc_ep_queue() local
631 struct ast_udc_dev *udc = ep->udc; in ast_udc_ep_queue()
641 if (ep->stopped) { in ast_udc_ep_queue()
648 list_add_tail(&req->queue, &ep->queue); in ast_udc_ep_queue()
654 rc = usb_gadget_map_request(&udc->gadget, &req->req, ep->dir_in); in ast_udc_ep_queue()
656 EP_DBG(ep, "Request mapping failure %d\n", rc); in ast_udc_ep_queue()
661 EP_DBG(ep, "enqueue req @%p\n", req); in ast_udc_ep_queue()
662 EP_DBG(ep, "l=%d, dma:0x%x, zero:%d, is_in:%d\n", in ast_udc_ep_queue()
663 _req->length, _req->dma, _req->zero, ep->dir_in); in ast_udc_ep_queue()
666 if (ep->ep.desc == NULL) { in ast_udc_ep_queue()
673 ast_udc_ep0_queue(ep, req); in ast_udc_ep_queue()
678 if (list_is_singular(&ep->queue)) { in ast_udc_ep_queue()
679 if (ep->desc_mode) in ast_udc_ep_queue()
680 ast_udc_epn_kick_desc(ep, req); in ast_udc_ep_queue()
682 ast_udc_epn_kick(ep, req); in ast_udc_ep_queue()
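
ast_udc_ep_queue() refuses requests on a stopped endpoint, DMA-maps the request, appends it to the endpoint's queue, and kicks the hardware only when the new request is the sole entry (list_is_singular()); ep0 takes the separate ast_udc_ep0_queue() path, and follow-on requests are started from the completion handler. A minimal sketch of the "kick only when the queue was idle" pattern:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct request {
	struct request *next;
};

struct endpoint {
	struct request *head, *tail;
	bool stopped;
};

static void hw_kick(struct endpoint *ep, struct request *req)
{
	printf("kick hardware for request %p\n", (void *)req);
}

static int ep_queue(struct endpoint *ep, struct request *req)
{
	if (ep->stopped)
		return -1;	/* a real driver would return an errno such as -ESHUTDOWN */

	/* Append to the software queue. */
	req->next = NULL;
	if (ep->tail)
		ep->tail->next = req;
	else
		ep->head = req;
	ep->tail = req;

	/* Start the hardware only if nothing was already in flight;
	 * otherwise the completion handler kicks the next request.
	 */
	if (ep->head == req)
		hw_kick(ep, req);
	return 0;
}

int main(void)
{
	struct endpoint ep = { 0 };
	struct request a = { 0 }, b = { 0 };

	ep_queue(&ep, &a);	/* queue was empty -> kicks immediately */
	ep_queue(&ep, &b);	/* already busy -> just queued */
	return 0;
}
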
693 struct ast_udc_ep *ep = to_ast_ep(_ep); in ast_udc_ep_dequeue() local
694 struct ast_udc_dev *udc = ep->udc; in ast_udc_ep_dequeue()
702 list_for_each_entry(req, &ep->queue, queue) { in ast_udc_ep_dequeue()
705 ast_udc_done(ep, req, -ESHUTDOWN); in ast_udc_ep_dequeue()
722 struct ast_udc_ep *ep = to_ast_ep(_ep); in ast_udc_ep_set_halt() local
723 struct ast_udc_dev *udc = ep->udc; in ast_udc_ep_set_halt()
728 EP_DBG(ep, "val:%d\n", value); in ast_udc_ep_set_halt()
732 epnum = usb_endpoint_num(ep->desc); in ast_udc_ep_set_halt()
752 ast_ep_write(ep, ctrl, AST_UDC_EP_CONFIG); in ast_udc_ep_set_halt()
755 ep->stopped = value ? 1 : 0; in ast_udc_ep_set_halt()
789 struct ast_udc_ep *ep = &udc->ep[0]; in ast_udc_ep0_out() local
793 if (list_empty(&ep->queue)) in ast_udc_ep0_out()
796 req = list_entry(ep->queue.next, struct ast_udc_request, queue); in ast_udc_ep0_out()
804 if ((rx_len < ep->ep.maxpacket) || in ast_udc_ep0_out()
807 if (!ep->dir_in) in ast_udc_ep0_out()
808 ast_udc_done(ep, req, 0); in ast_udc_ep0_out()
816 ast_udc_done(ep, req, 0); in ast_udc_ep0_out()
820 ep->dir_in = 0; in ast_udc_ep0_out()
823 ast_udc_ep0_queue(ep, req); in ast_udc_ep0_out()
829 struct ast_udc_ep *ep = &udc->ep[0]; in ast_udc_ep0_in() local
832 if (list_empty(&ep->queue)) { in ast_udc_ep0_in()
841 req = list_entry(ep->queue.next, struct ast_udc_request, queue); in ast_udc_ep0_in()
850 if (ep->dir_in) in ast_udc_ep0_in()
851 ast_udc_done(ep, req, 0); in ast_udc_ep0_in()
855 ast_udc_ep0_queue(ep, req); in ast_udc_ep0_in()
861 struct ast_udc_ep *ep = &udc->ep[ep_num]; in ast_udc_epn_handle() local
865 if (list_empty(&ep->queue)) in ast_udc_epn_handle()
868 req = list_first_entry(&ep->queue, struct ast_udc_request, queue); in ast_udc_epn_handle()
870 len = EP_DMA_GET_TX_SIZE(ast_ep_read(ep, AST_UDC_EP_DMA_STS)); in ast_udc_epn_handle()
873 EP_DBG(ep, "req @%p, length:(%d/%d), %s:0x%x\n", req, in ast_udc_epn_handle()
878 ast_udc_done(ep, req, 0); in ast_udc_epn_handle()
879 req = list_first_entry_or_null(&ep->queue, in ast_udc_epn_handle()
885 if (len < ep->ep.maxpacket) { in ast_udc_epn_handle()
886 ast_udc_done(ep, req, 0); in ast_udc_epn_handle()
887 req = list_first_entry_or_null(&ep->queue, in ast_udc_epn_handle()
895 ast_udc_epn_kick(ep, req); in ast_udc_epn_handle()
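
In single-stage mode, ast_udc_epn_kick() programs at most one maxpacket-sized chunk per DMA kick, and ast_udc_epn_handle() advances req.actual by the completed length, finishing the request when everything has moved or a short packet arrives, otherwise kicking the next chunk. A sketch of that progression loop (pure bookkeeping, no hardware access):

#include <stdbool.h>
#include <stdio.h>

struct request {
	unsigned int length;	/* total bytes to transfer */
	unsigned int actual;	/* bytes transferred so far */
};

/* One "kick": how many bytes the next DMA round should move. */
static unsigned int next_tx_len(const struct request *req, unsigned int maxpacket)
{
	unsigned int last = req->length - req->actual;

	return last > maxpacket ? maxpacket : last;
}

/* One "completion": account for len bytes; return true when the request is done. */
static bool handle_done(struct request *req, unsigned int len, unsigned int maxpacket)
{
	req->actual += len;

	/* Finished when all bytes moved, or the hardware saw a short packet. */
	return req->actual == req->length || len < maxpacket;
}

int main(void)
{
	struct request req = { .length = 1200 };
	const unsigned int maxpacket = 512;

	while (1) {
		unsigned int tx = next_tx_len(&req, maxpacket);

		printf("kick %u bytes (actual %u/%u)\n", tx, req.actual, req.length);
		if (handle_done(&req, tx, maxpacket))	/* pretend the DMA moved tx bytes */
			break;
	}
	printf("done at %u bytes\n", req.actual);
	return 0;
}
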
900 struct ast_udc_ep *ep = &udc->ep[ep_num]; in ast_udc_epn_handle_desc() local
908 if (list_empty(&ep->queue)) { in ast_udc_epn_handle_desc()
909 dev_warn(dev, "%s request queue empty!\n", ep->ep.name); in ast_udc_epn_handle_desc()
913 req = list_first_entry(&ep->queue, struct ast_udc_request, queue); in ast_udc_epn_handle_desc()
915 ctrl = ast_ep_read(ep, AST_UDC_EP_DMA_CTRL); in ast_udc_epn_handle_desc()
922 ast_ep_read(ep, AST_UDC_EP_DMA_CTRL), in ast_udc_epn_handle_desc()
927 ctrl = ast_ep_read(ep, AST_UDC_EP_DMA_STS); in ast_udc_epn_handle_desc()
937 EP_DBG(ep, "rd_ptr:%d, wr_ptr:%d\n", rd_ptr, wr_ptr); in ast_udc_epn_handle_desc()
941 len_in_desc = EP_DESC1_IN_LEN(ep->descs[i].des_1); in ast_udc_epn_handle_desc()
942 EP_DBG(ep, "desc[%d] len: %d\n", i, len_in_desc); in ast_udc_epn_handle_desc()
952 EP_DBG(ep, "req @%p, length:(%d/%d), %s:0x%x\n", req, in ast_udc_epn_handle_desc()
957 ast_udc_done(ep, req, 0); in ast_udc_epn_handle_desc()
958 req = list_first_entry_or_null(&ep->queue, in ast_udc_epn_handle_desc()
964 if (total_len < ep->ep.maxpacket) { in ast_udc_epn_handle_desc()
965 ast_udc_done(ep, req, 0); in ast_udc_epn_handle_desc()
966 req = list_first_entry_or_null(&ep->queue, in ast_udc_epn_handle_desc()
974 EP_DBG(ep, "More requests\n"); in ast_udc_epn_handle_desc()
975 ast_udc_epn_kick_desc(ep, req); in ast_udc_epn_handle_desc()
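
In descriptor mode the completion handler reads the ring's read and write pointers out of the DMA status register and sums the per-descriptor completed lengths between them to learn how many bytes actually moved, then either finishes the request or kicks the next chunk. A sketch of that read-to-write-pointer accumulation over a wrapping ring; field layout and ring size are again illustrative:

#include <stdint.h>
#include <stdio.h>

#define DESCS_COUNT	8	/* illustrative ring size */

struct dma_desc {
	uint32_t addr;
	uint32_t len;	/* hardware writes back the transferred length */
};

/* Sum transferred bytes for descriptors in [rd_ptr, wr_ptr), wrapping the ring. */
static uint32_t ring_completed_len(const struct dma_desc *descs,
				   unsigned int rd_ptr, unsigned int wr_ptr)
{
	uint32_t total = 0;
	unsigned int i = rd_ptr;

	while (i != wr_ptr) {
		total += descs[i].len;
		if (++i >= DESCS_COUNT)
			i = 0;
	}
	return total;
}

int main(void)
{
	struct dma_desc descs[DESCS_COUNT] = {
		[6] = { .len = 4096 },
		[7] = { .len = 4096 },
		[0] = { .len = 1808 },
	};

	/* rd_ptr 6, wr_ptr 1: three descriptors consumed across the wrap. */
	printf("completed: %u bytes\n", ring_completed_len(descs, 6, 1));
	return 0;
}
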
997 struct ast_udc_ep *ep; in ast_udc_getstatus() local
1012 status = udc->ep[epnum].stopped; in ast_udc_getstatus()
1018 ep = &udc->ep[epnum]; in ast_udc_getstatus()
1019 EP_DBG(ep, "status: 0x%x\n", status); in ast_udc_getstatus()
1025 EP_DBG(ep, "Can't respond request\n"); in ast_udc_getstatus()
1032 struct ast_udc_ep *ep = &udc->ep[0]; in ast_udc_ep0_handle_setup() local
1049 list_for_each_entry(req, &udc->ep[0].queue, queue) { in ast_udc_ep0_handle_setup()
1051 EP_DBG(ep, "there is req %p in ep0 queue !\n", req); in ast_udc_ep0_handle_setup()
1055 ast_udc_nuke(&udc->ep[0], -ETIMEDOUT); in ast_udc_ep0_handle_setup()
1057 udc->ep[0].dir_in = crq.bRequestType & USB_DIR_IN; in ast_udc_ep0_handle_setup()
1122 struct ast_udc_ep *ep; in ast_udc_isr() local
1139 ep = &udc->ep[1]; in ast_udc_isr()
1140 EP_DBG(ep, "dctrl:0x%x\n", in ast_udc_isr()
1141 ast_ep_read(ep, AST_UDC_EP_DMA_CTRL)); in ast_udc_isr()
1198 ep = &udc->ep[i + 1]; in ast_udc_isr()
1199 if (ep->desc_mode) in ast_udc_isr()
1295 struct ast_udc_ep *ep; in ast_udc_start() local
1306 ep = &udc->ep[i]; in ast_udc_start()
1307 ep->stopped = 0; in ast_udc_start()
1353 struct ast_udc_ep *ep; in ast_udc_init_ep() local
1357 ep = &udc->ep[i]; in ast_udc_init_ep()
1358 ep->ep.name = ast_ep_name[i]; in ast_udc_init_ep()
1360 ep->ep.caps.type_control = true; in ast_udc_init_ep()
1362 ep->ep.caps.type_iso = true; in ast_udc_init_ep()
1363 ep->ep.caps.type_bulk = true; in ast_udc_init_ep()
1364 ep->ep.caps.type_int = true; in ast_udc_init_ep()
1366 ep->ep.caps.dir_in = true; in ast_udc_init_ep()
1367 ep->ep.caps.dir_out = true; in ast_udc_init_ep()
1369 ep->ep.ops = &ast_udc_ep_ops; in ast_udc_init_ep()
1370 ep->udc = udc; in ast_udc_init_ep()
1372 INIT_LIST_HEAD(&ep->queue); in ast_udc_init_ep()
1375 usb_ep_set_maxpacket_limit(&ep->ep, in ast_udc_init_ep()
1380 ep->ep_reg = udc->reg + AST_UDC_EP_BASE + in ast_udc_init_ep()
1383 ep->epn_buf = udc->ep0_buf + (i * AST_UDC_EP_DMA_SIZE); in ast_udc_init_ep()
1384 ep->epn_buf_dma = udc->ep0_buf_dma + (i * AST_UDC_EP_DMA_SIZE); in ast_udc_init_ep()
1385 usb_ep_set_maxpacket_limit(&ep->ep, AST_UDC_EPn_MAX_PACKET); in ast_udc_init_ep()
1387 ep->descs = ep->epn_buf + AST_UDC_EPn_MAX_PACKET; in ast_udc_init_ep()
1388 ep->descs_dma = ep->epn_buf_dma + AST_UDC_EPn_MAX_PACKET; in ast_udc_init_ep()
1389 ep->descs_wptr = 0; in ast_udc_init_ep()
1391 list_add_tail(&ep->ep.ep_list, &udc->gadget.ep_list); in ast_udc_init_ep()
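
ast_udc_init_ep() fills the endpoint array once at setup: ep0 advertises only the control capability with its own packet limit, while the other endpoints advertise iso/bulk/int, get a register window and DMA buffer carved from shared allocations, and are linked onto gadget.ep_list. A small sketch of that capability and packet-limit split; the endpoint count, names and limits are illustrative values, not the controller's:

#include <stdbool.h>
#include <stdio.h>

#define NUM_ENDPOINTS	5	/* illustrative count */

struct ep_caps {
	bool type_control, type_iso, type_bulk, type_int;
	bool dir_in, dir_out;
};

struct endpoint {
	const char *name;
	struct ep_caps caps;
	unsigned int maxpacket_limit;
};

static const char *const ep_names[NUM_ENDPOINTS] = {
	"ep0", "ep1", "ep2", "ep3", "ep4",
};

static void init_eps(struct endpoint *eps)
{
	for (int i = 0; i < NUM_ENDPOINTS; i++) {
		eps[i].name = ep_names[i];
		eps[i].caps.dir_in = true;
		eps[i].caps.dir_out = true;

		if (i == 0) {
			/* ep0: control only, smaller packet limit */
			eps[i].caps.type_control = true;
			eps[i].maxpacket_limit = 64;
		} else {
			/* epN: iso/bulk/int, larger packet limit */
			eps[i].caps.type_iso = true;
			eps[i].caps.type_bulk = true;
			eps[i].caps.type_int = true;
			eps[i].maxpacket_limit = 1024;
		}
	}
}

int main(void)
{
	static struct endpoint eps[NUM_ENDPOINTS];

	init_eps(eps);
	for (int i = 0; i < NUM_ENDPOINTS; i++)
		printf("%s: maxpacket %u\n", eps[i].name, eps[i].maxpacket_limit);
	return 0;
}
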
1483 udc->gadget.ep0 = &udc->ep[0].ep; in ast_udc_probe()