Lines matching references to uap
277 static unsigned int pl011_reg_to_offset(const struct uart_amba_port *uap, in pl011_reg_to_offset() argument
280 return uap->reg_offset[reg]; in pl011_reg_to_offset()
283 static unsigned int pl011_read(const struct uart_amba_port *uap, in pl011_read() argument
286 void __iomem *addr = uap->port.membase + pl011_reg_to_offset(uap, reg); in pl011_read()
288 return (uap->port.iotype == UPIO_MEM32) ? in pl011_read()
292 static void pl011_write(unsigned int val, const struct uart_amba_port *uap, in pl011_write() argument
295 void __iomem *addr = uap->port.membase + pl011_reg_to_offset(uap, reg); in pl011_write()
297 if (uap->port.iotype == UPIO_MEM32) in pl011_write()
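The entries for pl011_reg_to_offset(), pl011_read() and pl011_write() above show only the offset lookup and the UPIO_MEM32 test; the actual MMIO access sits on continuation lines that do not mention uap. A minimal sketch of that dispatch pattern, assuming the readl_relaxed()/writel_relaxed() family of accessors and using stand-in type names rather than the driver's own:

#include <linux/io.h>
#include <linux/serial_core.h>

/* Stand-in for the driver's port structure; only the fields the
 * accessors need are shown. */
struct example_amba_port {
        struct uart_port port;          /* supplies membase and iotype */
        const u16 *reg_offset;          /* per-variant register offset table */
};

static unsigned int example_reg_to_offset(const struct example_amba_port *uap,
                                          unsigned int reg)
{
        return uap->reg_offset[reg];
}

static unsigned int example_read(const struct example_amba_port *uap,
                                 unsigned int reg)
{
        void __iomem *addr = uap->port.membase + example_reg_to_offset(uap, reg);

        /* UPIO_MEM32 ports are accessed 32 bits wide, the rest 16 bits wide. */
        return (uap->port.iotype == UPIO_MEM32) ?
                readl_relaxed(addr) : readw_relaxed(addr);
}

static void example_write(unsigned int val, const struct example_amba_port *uap,
                          unsigned int reg)
{
        void __iomem *addr = uap->port.membase + example_reg_to_offset(uap, reg);

        if (uap->port.iotype == UPIO_MEM32)
                writel_relaxed(val, addr);
        else
                writew_relaxed(val, addr);
}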
308 static int pl011_fifo_to_tty(struct uart_amba_port *uap) in pl011_fifo_to_tty() argument
315 status = pl011_read(uap, REG_FR); in pl011_fifo_to_tty()
320 ch = pl011_read(uap, REG_DR) | UART_DUMMY_DR_RX; in pl011_fifo_to_tty()
322 uap->port.icount.rx++; in pl011_fifo_to_tty()
327 uap->port.icount.brk++; in pl011_fifo_to_tty()
328 if (uart_handle_break(&uap->port)) in pl011_fifo_to_tty()
331 uap->port.icount.parity++; in pl011_fifo_to_tty()
333 uap->port.icount.frame++; in pl011_fifo_to_tty()
335 uap->port.icount.overrun++; in pl011_fifo_to_tty()
337 ch &= uap->port.read_status_mask; in pl011_fifo_to_tty()
347 spin_unlock(&uap->port.lock); in pl011_fifo_to_tty()
348 sysrq = uart_handle_sysrq_char(&uap->port, ch & 255); in pl011_fifo_to_tty()
349 spin_lock(&uap->port.lock); in pl011_fifo_to_tty()
352 uart_insert_char(&uap->port, ch, UART011_DR_OE, ch, flag); in pl011_fifo_to_tty()
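pl011_fifo_to_tty() appears above only through the lines that touch uap; the surrounding loop and flag handling fall on other lines. A condensed sketch of a PL011-style RX FIFO drain, with the per-error bookkeeping reduced to a comment and simplified to operate on a bare uart_port:

#include <linux/amba/serial.h>
#include <linux/io.h>
#include <linux/serial_core.h>
#include <linux/tty.h>

#define EXAMPLE_DUMMY_DR_RX     (1 << 16)       /* stand-in for UART_DUMMY_DR_RX */

static int example_fifo_to_tty(struct uart_port *port)
{
        unsigned int ch, flag, status;
        int fifotaken = 0;

        while (fifotaken != 256) {
                status = readw(port->membase + UART01x_FR);
                if (status & UART01x_FR_RXFE)
                        break;                  /* RX FIFO empty */

                ch = readw(port->membase + UART01x_DR) | EXAMPLE_DUMMY_DR_RX;
                flag = TTY_NORMAL;
                port->icount.rx++;

                /* DR[11:8] carries break/parity/framing/overrun status for
                 * this character; the real driver bumps the matching icount
                 * fields, applies read_status_mask and picks a tty flag. */

                if (uart_handle_sysrq_char(port, ch & 255))
                        continue;

                uart_insert_char(port, ch, UART011_DR_OE, ch, flag);
                fifotaken++;
        }

        return fifotaken;
}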
397 static void pl011_dma_probe(struct uart_amba_port *uap) in pl011_dma_probe() argument
400 struct amba_pl011_data *plat = dev_get_platdata(uap->port.dev); in pl011_dma_probe()
401 struct device *dev = uap->port.dev; in pl011_dma_probe()
403 .dst_addr = uap->port.mapbase + in pl011_dma_probe()
404 pl011_reg_to_offset(uap, REG_DR), in pl011_dma_probe()
407 .dst_maxburst = uap->fifosize >> 1, in pl011_dma_probe()
413 uap->dma_probed = true; in pl011_dma_probe()
417 uap->dma_probed = false; in pl011_dma_probe()
423 dev_info(uap->port.dev, "no DMA platform data\n"); in pl011_dma_probe()
434 dev_err(uap->port.dev, "no TX DMA channel!\n"); in pl011_dma_probe()
440 uap->dmatx.chan = chan; in pl011_dma_probe()
442 dev_info(uap->port.dev, "DMA channel TX %s\n", in pl011_dma_probe()
443 dma_chan_name(uap->dmatx.chan)); in pl011_dma_probe()
452 dev_err(uap->port.dev, "no RX DMA channel!\n"); in pl011_dma_probe()
459 .src_addr = uap->port.mapbase + in pl011_dma_probe()
460 pl011_reg_to_offset(uap, REG_DR), in pl011_dma_probe()
463 .src_maxburst = uap->fifosize >> 2, in pl011_dma_probe()
477 dev_info(uap->port.dev, in pl011_dma_probe()
483 uap->dmarx.chan = chan; in pl011_dma_probe()
485 uap->dmarx.auto_poll_rate = false; in pl011_dma_probe()
489 uap->dmarx.auto_poll_rate = false; in pl011_dma_probe()
490 uap->dmarx.poll_rate = plat->dma_rx_poll_rate; in pl011_dma_probe()
497 uap->dmarx.auto_poll_rate = true; in pl011_dma_probe()
498 uap->dmarx.poll_rate = 100; in pl011_dma_probe()
502 uap->dmarx.poll_timeout = in pl011_dma_probe()
505 uap->dmarx.poll_timeout = 3000; in pl011_dma_probe()
507 uap->dmarx.auto_poll_rate = of_property_read_bool( in pl011_dma_probe()
509 if (uap->dmarx.auto_poll_rate) { in pl011_dma_probe()
514 uap->dmarx.poll_rate = x; in pl011_dma_probe()
516 uap->dmarx.poll_rate = 100; in pl011_dma_probe()
519 uap->dmarx.poll_timeout = x; in pl011_dma_probe()
521 uap->dmarx.poll_timeout = 3000; in pl011_dma_probe()
524 dev_info(uap->port.dev, "DMA channel RX %s\n", in pl011_dma_probe()
525 dma_chan_name(uap->dmarx.chan)); in pl011_dma_probe()
529 static void pl011_dma_remove(struct uart_amba_port *uap) in pl011_dma_remove() argument
531 if (uap->dmatx.chan) in pl011_dma_remove()
532 dma_release_channel(uap->dmatx.chan); in pl011_dma_remove()
533 if (uap->dmarx.chan) in pl011_dma_remove()
534 dma_release_channel(uap->dmarx.chan); in pl011_dma_remove()
538 static int pl011_dma_tx_refill(struct uart_amba_port *uap);
539 static void pl011_start_tx_pio(struct uart_amba_port *uap);
547 struct uart_amba_port *uap = data; in pl011_dma_tx_callback() local
548 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_callback()
552 spin_lock_irqsave(&uap->port.lock, flags); in pl011_dma_tx_callback()
553 if (uap->dmatx.queued) in pl011_dma_tx_callback()
557 dmacr = uap->dmacr; in pl011_dma_tx_callback()
558 uap->dmacr = dmacr & ~UART011_TXDMAE; in pl011_dma_tx_callback()
559 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_callback()
570 if (!(dmacr & UART011_TXDMAE) || uart_tx_stopped(&uap->port) || in pl011_dma_tx_callback()
571 uart_circ_empty(&uap->port.state->xmit)) { in pl011_dma_tx_callback()
572 uap->dmatx.queued = false; in pl011_dma_tx_callback()
573 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_dma_tx_callback()
577 if (pl011_dma_tx_refill(uap) <= 0) in pl011_dma_tx_callback()
582 pl011_start_tx_pio(uap); in pl011_dma_tx_callback()
584 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_dma_tx_callback()
595 static int pl011_dma_tx_refill(struct uart_amba_port *uap) in pl011_dma_tx_refill() argument
597 struct pl011_dmatx_data *dmatx = &uap->dmatx; in pl011_dma_tx_refill()
601 struct circ_buf *xmit = &uap->port.state->xmit; in pl011_dma_tx_refill()
611 if (count < (uap->fifosize >> 1)) { in pl011_dma_tx_refill()
612 uap->dmatx.queued = false; in pl011_dma_tx_refill()
644 uap->dmatx.queued = false; in pl011_dma_tx_refill()
645 dev_dbg(uap->port.dev, "unable to map TX DMA\n"); in pl011_dma_tx_refill()
653 uap->dmatx.queued = false; in pl011_dma_tx_refill()
658 dev_dbg(uap->port.dev, "TX DMA busy\n"); in pl011_dma_tx_refill()
664 desc->callback_param = uap; in pl011_dma_tx_refill()
672 uap->dmacr |= UART011_TXDMAE; in pl011_dma_tx_refill()
673 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_refill()
674 uap->dmatx.queued = true; in pl011_dma_tx_refill()
680 uart_xmit_advance(&uap->port, count); in pl011_dma_tx_refill()
683 uart_write_wakeup(&uap->port); in pl011_dma_tx_refill()
696 static bool pl011_dma_tx_irq(struct uart_amba_port *uap) in pl011_dma_tx_irq() argument
698 if (!uap->using_tx_dma) in pl011_dma_tx_irq()
706 if (uap->dmatx.queued) { in pl011_dma_tx_irq()
707 uap->dmacr |= UART011_TXDMAE; in pl011_dma_tx_irq()
708 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_irq()
709 uap->im &= ~UART011_TXIM; in pl011_dma_tx_irq()
710 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_tx_irq()
718 if (pl011_dma_tx_refill(uap) > 0) { in pl011_dma_tx_irq()
719 uap->im &= ~UART011_TXIM; in pl011_dma_tx_irq()
720 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_tx_irq()
730 static inline void pl011_dma_tx_stop(struct uart_amba_port *uap) in pl011_dma_tx_stop() argument
732 if (uap->dmatx.queued) { in pl011_dma_tx_stop()
733 uap->dmacr &= ~UART011_TXDMAE; in pl011_dma_tx_stop()
734 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_stop()
746 static inline bool pl011_dma_tx_start(struct uart_amba_port *uap) in pl011_dma_tx_start() argument
750 if (!uap->using_tx_dma) in pl011_dma_tx_start()
753 if (!uap->port.x_char) { in pl011_dma_tx_start()
757 if (!uap->dmatx.queued) { in pl011_dma_tx_start()
758 if (pl011_dma_tx_refill(uap) > 0) { in pl011_dma_tx_start()
759 uap->im &= ~UART011_TXIM; in pl011_dma_tx_start()
760 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_tx_start()
763 } else if (!(uap->dmacr & UART011_TXDMAE)) { in pl011_dma_tx_start()
764 uap->dmacr |= UART011_TXDMAE; in pl011_dma_tx_start()
765 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_start()
774 dmacr = uap->dmacr; in pl011_dma_tx_start()
775 uap->dmacr &= ~UART011_TXDMAE; in pl011_dma_tx_start()
776 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_tx_start()
778 if (pl011_read(uap, REG_FR) & UART01x_FR_TXFF) { in pl011_dma_tx_start()
787 pl011_write(uap->port.x_char, uap, REG_DR); in pl011_dma_tx_start()
788 uap->port.icount.tx++; in pl011_dma_tx_start()
789 uap->port.x_char = 0; in pl011_dma_tx_start()
792 uap->dmacr = dmacr; in pl011_dma_tx_start()
793 pl011_write(dmacr, uap, REG_DMACR); in pl011_dma_tx_start()
803 __releases(&uap->port.lock) in pl011_dma_flush_buffer()
804 __acquires(&uap->port.lock) in pl011_dma_flush_buffer()
806 struct uart_amba_port *uap = in pl011_dma_flush_buffer() local
809 if (!uap->using_tx_dma) in pl011_dma_flush_buffer()
812 dmaengine_terminate_async(uap->dmatx.chan); in pl011_dma_flush_buffer()
814 if (uap->dmatx.queued) { in pl011_dma_flush_buffer()
815 dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1, in pl011_dma_flush_buffer()
817 uap->dmatx.queued = false; in pl011_dma_flush_buffer()
818 uap->dmacr &= ~UART011_TXDMAE; in pl011_dma_flush_buffer()
819 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_flush_buffer()
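Several declarations flagged "local" above and below look cut short because their container_of() initializer continues on a line that does not itself reference uap; the entry at line 1832 (pl011_unthrottle_rx) shows the complete form. A small sketch of that recovery pattern, again with stand-in names:

#include <linux/kernel.h>               /* container_of() */
#include <linux/serial_core.h>

struct example_amba_port {
        struct uart_port port;          /* embedded member handed to the serial core */
        unsigned int im;                /* example of driver-private state */
};

/* uart_ops callbacks receive the embedded uart_port; the driver gets
 * its own structure back by subtracting the member offset. */
static void example_stop_tx(struct uart_port *port)
{
        struct example_amba_port *uap =
                container_of(port, struct example_amba_port, port);

        uap->im = 0;
}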
825 static int pl011_dma_rx_trigger_dma(struct uart_amba_port *uap) in pl011_dma_rx_trigger_dma() argument
827 struct dma_chan *rxchan = uap->dmarx.chan; in pl011_dma_rx_trigger_dma()
828 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_trigger_dma()
836 sgbuf = uap->dmarx.use_buf_b ? in pl011_dma_rx_trigger_dma()
837 &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a; in pl011_dma_rx_trigger_dma()
847 uap->dmarx.running = false; in pl011_dma_rx_trigger_dma()
854 desc->callback_param = uap; in pl011_dma_rx_trigger_dma()
858 uap->dmacr |= UART011_RXDMAE; in pl011_dma_rx_trigger_dma()
859 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_rx_trigger_dma()
860 uap->dmarx.running = true; in pl011_dma_rx_trigger_dma()
862 uap->im &= ~UART011_RXIM; in pl011_dma_rx_trigger_dma()
863 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_rx_trigger_dma()
873 static void pl011_dma_rx_chars(struct uart_amba_port *uap, in pl011_dma_rx_chars() argument
877 struct tty_port *port = &uap->port.state->port; in pl011_dma_rx_chars()
879 &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a; in pl011_dma_rx_chars()
883 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_chars()
886 if (uap->dmarx.poll_rate) { in pl011_dma_rx_chars()
905 uap->port.icount.rx += dma_count; in pl011_dma_rx_chars()
907 dev_warn(uap->port.dev, in pl011_dma_rx_chars()
912 if (uap->dmarx.poll_rate) in pl011_dma_rx_chars()
922 UART011_FEIS, uap, REG_ICR); in pl011_dma_rx_chars()
935 fifotaken = pl011_fifo_to_tty(uap); in pl011_dma_rx_chars()
938 dev_vdbg(uap->port.dev, in pl011_dma_rx_chars()
944 static void pl011_dma_rx_irq(struct uart_amba_port *uap) in pl011_dma_rx_irq() argument
946 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_irq()
960 dev_err(uap->port.dev, "unable to pause DMA transfer\n"); in pl011_dma_rx_irq()
964 dev_err(uap->port.dev, "unable to pause DMA transfer\n"); in pl011_dma_rx_irq()
967 uap->dmacr &= ~UART011_RXDMAE; in pl011_dma_rx_irq()
968 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_rx_irq()
969 uap->dmarx.running = false; in pl011_dma_rx_irq()
980 pl011_dma_rx_chars(uap, pending, dmarx->use_buf_b, true); in pl011_dma_rx_irq()
984 if (pl011_dma_rx_trigger_dma(uap)) { in pl011_dma_rx_irq()
985 dev_dbg(uap->port.dev, "could not retrigger RX DMA job " in pl011_dma_rx_irq()
987 uap->im |= UART011_RXIM; in pl011_dma_rx_irq()
988 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_rx_irq()
994 struct uart_amba_port *uap = data; in pl011_dma_rx_callback() local
995 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_callback()
1011 spin_lock_irq(&uap->port.lock); in pl011_dma_rx_callback()
1022 uap->dmarx.running = false; in pl011_dma_rx_callback()
1024 ret = pl011_dma_rx_trigger_dma(uap); in pl011_dma_rx_callback()
1026 pl011_dma_rx_chars(uap, pending, lastbuf, false); in pl011_dma_rx_callback()
1027 spin_unlock_irq(&uap->port.lock); in pl011_dma_rx_callback()
1033 dev_dbg(uap->port.dev, "could not retrigger RX DMA job " in pl011_dma_rx_callback()
1035 uap->im |= UART011_RXIM; in pl011_dma_rx_callback()
1036 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_rx_callback()
1045 static inline void pl011_dma_rx_stop(struct uart_amba_port *uap) in pl011_dma_rx_stop() argument
1047 if (!uap->using_rx_dma) in pl011_dma_rx_stop()
1051 uap->dmacr &= ~UART011_RXDMAE; in pl011_dma_rx_stop()
1052 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_rx_stop()
1062 struct uart_amba_port *uap = from_timer(uap, t, dmarx.timer); in pl011_dma_rx_poll() local
1063 struct tty_port *port = &uap->port.state->port; in pl011_dma_rx_poll()
1064 struct pl011_dmarx_data *dmarx = &uap->dmarx; in pl011_dma_rx_poll()
1065 struct dma_chan *rxchan = uap->dmarx.chan; in pl011_dma_rx_poll()
1073 sgbuf = dmarx->use_buf_b ? &uap->dmarx.sgbuf_b : &uap->dmarx.sgbuf_a; in pl011_dma_rx_poll()
1091 > uap->dmarx.poll_timeout) { in pl011_dma_rx_poll()
1093 spin_lock_irqsave(&uap->port.lock, flags); in pl011_dma_rx_poll()
1094 pl011_dma_rx_stop(uap); in pl011_dma_rx_poll()
1095 uap->im |= UART011_RXIM; in pl011_dma_rx_poll()
1096 pl011_write(uap->im, uap, REG_IMSC); in pl011_dma_rx_poll()
1097 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_dma_rx_poll()
1099 uap->dmarx.running = false; in pl011_dma_rx_poll()
1101 del_timer(&uap->dmarx.timer); in pl011_dma_rx_poll()
1103 mod_timer(&uap->dmarx.timer, in pl011_dma_rx_poll()
1104 jiffies + msecs_to_jiffies(uap->dmarx.poll_rate)); in pl011_dma_rx_poll()
1108 static void pl011_dma_startup(struct uart_amba_port *uap) in pl011_dma_startup() argument
1112 if (!uap->dma_probed) in pl011_dma_startup()
1113 pl011_dma_probe(uap); in pl011_dma_startup()
1115 if (!uap->dmatx.chan) in pl011_dma_startup()
1118 uap->dmatx.buf = kmalloc(PL011_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA); in pl011_dma_startup()
1119 if (!uap->dmatx.buf) { in pl011_dma_startup()
1120 dev_err(uap->port.dev, "no memory for DMA TX buffer\n"); in pl011_dma_startup()
1121 uap->port.fifosize = uap->fifosize; in pl011_dma_startup()
1125 sg_init_one(&uap->dmatx.sg, uap->dmatx.buf, PL011_DMA_BUFFER_SIZE); in pl011_dma_startup()
1128 uap->port.fifosize = PL011_DMA_BUFFER_SIZE; in pl011_dma_startup()
1129 uap->using_tx_dma = true; in pl011_dma_startup()
1131 if (!uap->dmarx.chan) in pl011_dma_startup()
1135 ret = pl011_sgbuf_init(uap->dmarx.chan, &uap->dmarx.sgbuf_a, in pl011_dma_startup()
1138 dev_err(uap->port.dev, "failed to init DMA %s: %d\n", in pl011_dma_startup()
1143 ret = pl011_sgbuf_init(uap->dmarx.chan, &uap->dmarx.sgbuf_b, in pl011_dma_startup()
1146 dev_err(uap->port.dev, "failed to init DMA %s: %d\n", in pl011_dma_startup()
1148 pl011_sgbuf_free(uap->dmarx.chan, &uap->dmarx.sgbuf_a, in pl011_dma_startup()
1153 uap->using_rx_dma = true; in pl011_dma_startup()
1157 uap->dmacr |= UART011_DMAONERR; in pl011_dma_startup()
1158 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_startup()
1165 if (uap->vendor->dma_threshold) in pl011_dma_startup()
1167 uap, REG_ST_DMAWM); in pl011_dma_startup()
1169 if (uap->using_rx_dma) { in pl011_dma_startup()
1170 if (pl011_dma_rx_trigger_dma(uap)) in pl011_dma_startup()
1171 dev_dbg(uap->port.dev, "could not trigger initial " in pl011_dma_startup()
1173 if (uap->dmarx.poll_rate) { in pl011_dma_startup()
1174 timer_setup(&uap->dmarx.timer, pl011_dma_rx_poll, 0); in pl011_dma_startup()
1175 mod_timer(&uap->dmarx.timer, in pl011_dma_startup()
1177 msecs_to_jiffies(uap->dmarx.poll_rate)); in pl011_dma_startup()
1178 uap->dmarx.last_residue = PL011_DMA_BUFFER_SIZE; in pl011_dma_startup()
1179 uap->dmarx.last_jiffies = jiffies; in pl011_dma_startup()
1184 static void pl011_dma_shutdown(struct uart_amba_port *uap) in pl011_dma_shutdown() argument
1186 if (!(uap->using_tx_dma || uap->using_rx_dma)) in pl011_dma_shutdown()
1190 while (pl011_read(uap, REG_FR) & uap->vendor->fr_busy) in pl011_dma_shutdown()
1193 spin_lock_irq(&uap->port.lock); in pl011_dma_shutdown()
1194 uap->dmacr &= ~(UART011_DMAONERR | UART011_RXDMAE | UART011_TXDMAE); in pl011_dma_shutdown()
1195 pl011_write(uap->dmacr, uap, REG_DMACR); in pl011_dma_shutdown()
1196 spin_unlock_irq(&uap->port.lock); in pl011_dma_shutdown()
1198 if (uap->using_tx_dma) { in pl011_dma_shutdown()
1200 dmaengine_terminate_all(uap->dmatx.chan); in pl011_dma_shutdown()
1201 if (uap->dmatx.queued) { in pl011_dma_shutdown()
1202 dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1, in pl011_dma_shutdown()
1204 uap->dmatx.queued = false; in pl011_dma_shutdown()
1207 kfree(uap->dmatx.buf); in pl011_dma_shutdown()
1208 uap->using_tx_dma = false; in pl011_dma_shutdown()
1211 if (uap->using_rx_dma) { in pl011_dma_shutdown()
1212 dmaengine_terminate_all(uap->dmarx.chan); in pl011_dma_shutdown()
1214 pl011_sgbuf_free(uap->dmarx.chan, &uap->dmarx.sgbuf_a, DMA_FROM_DEVICE); in pl011_dma_shutdown()
1215 pl011_sgbuf_free(uap->dmarx.chan, &uap->dmarx.sgbuf_b, DMA_FROM_DEVICE); in pl011_dma_shutdown()
1216 if (uap->dmarx.poll_rate) in pl011_dma_shutdown()
1217 del_timer_sync(&uap->dmarx.timer); in pl011_dma_shutdown()
1218 uap->using_rx_dma = false; in pl011_dma_shutdown()
1222 static inline bool pl011_dma_rx_available(struct uart_amba_port *uap) in pl011_dma_rx_available() argument
1224 return uap->using_rx_dma; in pl011_dma_rx_available()
1227 static inline bool pl011_dma_rx_running(struct uart_amba_port *uap) in pl011_dma_rx_running() argument
1229 return uap->using_rx_dma && uap->dmarx.running; in pl011_dma_rx_running()
1234 static inline void pl011_dma_remove(struct uart_amba_port *uap) in pl011_dma_remove() argument
1238 static inline void pl011_dma_startup(struct uart_amba_port *uap) in pl011_dma_startup() argument
1242 static inline void pl011_dma_shutdown(struct uart_amba_port *uap) in pl011_dma_shutdown() argument
1246 static inline bool pl011_dma_tx_irq(struct uart_amba_port *uap) in pl011_dma_tx_irq() argument
1251 static inline void pl011_dma_tx_stop(struct uart_amba_port *uap) in pl011_dma_tx_stop() argument
1255 static inline bool pl011_dma_tx_start(struct uart_amba_port *uap) in pl011_dma_tx_start() argument
1260 static inline void pl011_dma_rx_irq(struct uart_amba_port *uap) in pl011_dma_rx_irq() argument
1264 static inline void pl011_dma_rx_stop(struct uart_amba_port *uap) in pl011_dma_rx_stop() argument
1268 static inline int pl011_dma_rx_trigger_dma(struct uart_amba_port *uap) in pl011_dma_rx_trigger_dma() argument
1273 static inline bool pl011_dma_rx_available(struct uart_amba_port *uap) in pl011_dma_rx_available() argument
1278 static inline bool pl011_dma_rx_running(struct uart_amba_port *uap) in pl011_dma_rx_running() argument
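The inline no-op variants listed just above (lines 1234-1278) shadow the pl011_dma_* routines defined earlier, which points at a build-time split; a hedged sketch of that arrangement, assuming it is keyed on CONFIG_DMA_ENGINE:

struct example_amba_port;               /* defined elsewhere */

#ifdef CONFIG_DMA_ENGINE
/* Full implementation: claim channels, allocate buffers, raise DMACR bits. */
static void example_dma_startup(struct example_amba_port *uap)
{
        /* ... real DMA setup ... */
}
#else
/* Empty stub so the callers stay identical when DMA support is not built in. */
static inline void example_dma_startup(struct example_amba_port *uap)
{
}
#endif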
1286 static void pl011_rs485_tx_stop(struct uart_amba_port *uap) in pl011_rs485_tx_stop() argument
1292 const int MAX_TX_DRAIN_ITERS = uap->port.fifosize * 2; in pl011_rs485_tx_stop()
1293 struct uart_port *port = &uap->port; in pl011_rs485_tx_stop()
1305 udelay(uap->rs485_tx_drain_interval); in pl011_rs485_tx_stop()
1312 cr = pl011_read(uap, REG_CR); in pl011_rs485_tx_stop()
1322 pl011_write(cr, uap, REG_CR); in pl011_rs485_tx_stop()
1324 uap->rs485_tx_started = false; in pl011_rs485_tx_stop()
1329 struct uart_amba_port *uap = in pl011_stop_tx() local
1332 uap->im &= ~UART011_TXIM; in pl011_stop_tx()
1333 pl011_write(uap->im, uap, REG_IMSC); in pl011_stop_tx()
1334 pl011_dma_tx_stop(uap); in pl011_stop_tx()
1336 if ((port->rs485.flags & SER_RS485_ENABLED) && uap->rs485_tx_started) in pl011_stop_tx()
1337 pl011_rs485_tx_stop(uap); in pl011_stop_tx()
1340 static bool pl011_tx_chars(struct uart_amba_port *uap, bool from_irq);
1343 static void pl011_start_tx_pio(struct uart_amba_port *uap) in pl011_start_tx_pio() argument
1345 if (pl011_tx_chars(uap, false)) { in pl011_start_tx_pio()
1346 uap->im |= UART011_TXIM; in pl011_start_tx_pio()
1347 pl011_write(uap->im, uap, REG_IMSC); in pl011_start_tx_pio()
1353 struct uart_amba_port *uap = in pl011_start_tx() local
1356 if (!pl011_dma_tx_start(uap)) in pl011_start_tx()
1357 pl011_start_tx_pio(uap); in pl011_start_tx()
1362 struct uart_amba_port *uap = in pl011_stop_rx() local
1365 uap->im &= ~(UART011_RXIM|UART011_RTIM|UART011_FEIM| in pl011_stop_rx()
1367 pl011_write(uap->im, uap, REG_IMSC); in pl011_stop_rx()
1369 pl011_dma_rx_stop(uap); in pl011_stop_rx()
1383 struct uart_amba_port *uap = in pl011_enable_ms() local
1386 uap->im |= UART011_RIMIM|UART011_CTSMIM|UART011_DCDMIM|UART011_DSRMIM; in pl011_enable_ms()
1387 pl011_write(uap->im, uap, REG_IMSC); in pl011_enable_ms()
1390 static void pl011_rx_chars(struct uart_amba_port *uap) in pl011_rx_chars() argument
1391 __releases(&uap->port.lock) in pl011_rx_chars()
1392 __acquires(&uap->port.lock) in pl011_rx_chars()
1394 pl011_fifo_to_tty(uap); in pl011_rx_chars()
1396 spin_unlock(&uap->port.lock); in pl011_rx_chars()
1397 tty_flip_buffer_push(&uap->port.state->port); in pl011_rx_chars()
1402 if (pl011_dma_rx_available(uap)) { in pl011_rx_chars()
1403 if (pl011_dma_rx_trigger_dma(uap)) { in pl011_rx_chars()
1404 dev_dbg(uap->port.dev, "could not trigger RX DMA job " in pl011_rx_chars()
1406 uap->im |= UART011_RXIM; in pl011_rx_chars()
1407 pl011_write(uap->im, uap, REG_IMSC); in pl011_rx_chars()
1411 if (uap->dmarx.poll_rate) { in pl011_rx_chars()
1412 uap->dmarx.last_jiffies = jiffies; in pl011_rx_chars()
1413 uap->dmarx.last_residue = PL011_DMA_BUFFER_SIZE; in pl011_rx_chars()
1414 mod_timer(&uap->dmarx.timer, in pl011_rx_chars()
1416 msecs_to_jiffies(uap->dmarx.poll_rate)); in pl011_rx_chars()
1421 spin_lock(&uap->port.lock); in pl011_rx_chars()
1424 static bool pl011_tx_char(struct uart_amba_port *uap, unsigned char c, in pl011_tx_char() argument
1428 pl011_read(uap, REG_FR) & UART01x_FR_TXFF) in pl011_tx_char()
1431 pl011_write(c, uap, REG_DR); in pl011_tx_char()
1432 uap->port.icount.tx++; in pl011_tx_char()
1437 static void pl011_rs485_tx_start(struct uart_amba_port *uap) in pl011_rs485_tx_start() argument
1439 struct uart_port *port = &uap->port; in pl011_rs485_tx_start()
1443 cr = pl011_read(uap, REG_CR); in pl011_rs485_tx_start()
1455 pl011_write(cr, uap, REG_CR); in pl011_rs485_tx_start()
1460 uap->rs485_tx_started = true; in pl011_rs485_tx_start()
1464 static bool pl011_tx_chars(struct uart_amba_port *uap, bool from_irq) in pl011_tx_chars() argument
1466 struct circ_buf *xmit = &uap->port.state->xmit; in pl011_tx_chars()
1467 int count = uap->fifosize >> 1; in pl011_tx_chars()
1469 if ((uap->port.rs485.flags & SER_RS485_ENABLED) && in pl011_tx_chars()
1470 !uap->rs485_tx_started) in pl011_tx_chars()
1471 pl011_rs485_tx_start(uap); in pl011_tx_chars()
1473 if (uap->port.x_char) { in pl011_tx_chars()
1474 if (!pl011_tx_char(uap, uap->port.x_char, from_irq)) in pl011_tx_chars()
1476 uap->port.x_char = 0; in pl011_tx_chars()
1479 if (uart_circ_empty(xmit) || uart_tx_stopped(&uap->port)) { in pl011_tx_chars()
1480 pl011_stop_tx(&uap->port); in pl011_tx_chars()
1485 if (pl011_dma_tx_irq(uap)) in pl011_tx_chars()
1492 if (!pl011_tx_char(uap, xmit->buf[xmit->tail], from_irq)) in pl011_tx_chars()
1499 uart_write_wakeup(&uap->port); in pl011_tx_chars()
1502 pl011_stop_tx(&uap->port); in pl011_tx_chars()
1508 static void pl011_modem_status(struct uart_amba_port *uap) in pl011_modem_status() argument
1512 status = pl011_read(uap, REG_FR) & UART01x_FR_MODEM_ANY; in pl011_modem_status()
1514 delta = status ^ uap->old_status; in pl011_modem_status()
1515 uap->old_status = status; in pl011_modem_status()
1521 uart_handle_dcd_change(&uap->port, status & UART01x_FR_DCD); in pl011_modem_status()
1523 if (delta & uap->vendor->fr_dsr) in pl011_modem_status()
1524 uap->port.icount.dsr++; in pl011_modem_status()
1526 if (delta & uap->vendor->fr_cts) in pl011_modem_status()
1527 uart_handle_cts_change(&uap->port, in pl011_modem_status()
1528 status & uap->vendor->fr_cts); in pl011_modem_status()
1530 wake_up_interruptible(&uap->port.state->port.delta_msr_wait); in pl011_modem_status()
1533 static void check_apply_cts_event_workaround(struct uart_amba_port *uap) in check_apply_cts_event_workaround() argument
1535 if (!uap->vendor->cts_event_workaround) in check_apply_cts_event_workaround()
1539 pl011_write(0x00, uap, REG_ICR); in check_apply_cts_event_workaround()
1546 pl011_read(uap, REG_ICR); in check_apply_cts_event_workaround()
1547 pl011_read(uap, REG_ICR); in check_apply_cts_event_workaround()
1552 struct uart_amba_port *uap = dev_id; in pl011_int() local
1557 spin_lock_irqsave(&uap->port.lock, flags); in pl011_int()
1558 status = pl011_read(uap, REG_RIS) & uap->im; in pl011_int()
1561 check_apply_cts_event_workaround(uap); in pl011_int()
1565 uap, REG_ICR); in pl011_int()
1568 if (pl011_dma_rx_running(uap)) in pl011_int()
1569 pl011_dma_rx_irq(uap); in pl011_int()
1571 pl011_rx_chars(uap); in pl011_int()
1575 pl011_modem_status(uap); in pl011_int()
1577 pl011_tx_chars(uap, true); in pl011_int()
1582 status = pl011_read(uap, REG_RIS) & uap->im; in pl011_int()
1587 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_int()
1594 struct uart_amba_port *uap = in pl011_tx_empty() local
1598 unsigned int status = pl011_read(uap, REG_FR) ^ uap->vendor->inv_fr; in pl011_tx_empty()
1600 return status & (uap->vendor->fr_busy | UART01x_FR_TXFF) ? in pl011_tx_empty()
1606 struct uart_amba_port *uap = in pl011_get_mctrl() local
1609 unsigned int status = pl011_read(uap, REG_FR); in pl011_get_mctrl()
1616 TIOCMBIT(uap->vendor->fr_dsr, TIOCM_DSR); in pl011_get_mctrl()
1617 TIOCMBIT(uap->vendor->fr_cts, TIOCM_CTS); in pl011_get_mctrl()
1618 TIOCMBIT(uap->vendor->fr_ri, TIOCM_RNG); in pl011_get_mctrl()
1625 struct uart_amba_port *uap = in pl011_set_mctrl() local
1629 cr = pl011_read(uap, REG_CR); in pl011_set_mctrl()
1649 pl011_write(cr, uap, REG_CR); in pl011_set_mctrl()
1654 struct uart_amba_port *uap = in pl011_break_ctl() local
1659 spin_lock_irqsave(&uap->port.lock, flags); in pl011_break_ctl()
1660 lcr_h = pl011_read(uap, REG_LCRH_TX); in pl011_break_ctl()
1665 pl011_write(lcr_h, uap, REG_LCRH_TX); in pl011_break_ctl()
1666 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_break_ctl()
1673 struct uart_amba_port *uap = in pl011_quiesce_irqs() local
1676 pl011_write(pl011_read(uap, REG_MIS), uap, REG_ICR); in pl011_quiesce_irqs()
1690 pl011_write(pl011_read(uap, REG_IMSC) & ~UART011_TXIM, uap, in pl011_quiesce_irqs()
1696 struct uart_amba_port *uap = in pl011_get_poll_char() local
1706 status = pl011_read(uap, REG_FR); in pl011_get_poll_char()
1710 return pl011_read(uap, REG_DR); in pl011_get_poll_char()
1716 struct uart_amba_port *uap = in pl011_put_poll_char() local
1719 while (pl011_read(uap, REG_FR) & UART01x_FR_TXFF) in pl011_put_poll_char()
1722 pl011_write(ch, uap, REG_DR); in pl011_put_poll_char()
1729 struct uart_amba_port *uap = in pl011_hwinit() local
1739 retval = clk_prepare_enable(uap->clk); in pl011_hwinit()
1743 uap->port.uartclk = clk_get_rate(uap->clk); in pl011_hwinit()
1748 uap, REG_ICR); in pl011_hwinit()
1754 uap->im = pl011_read(uap, REG_IMSC); in pl011_hwinit()
1755 pl011_write(UART011_RTIM | UART011_RXIM, uap, REG_IMSC); in pl011_hwinit()
1757 if (dev_get_platdata(uap->port.dev)) { in pl011_hwinit()
1760 plat = dev_get_platdata(uap->port.dev); in pl011_hwinit()
1767 static bool pl011_split_lcrh(const struct uart_amba_port *uap) in pl011_split_lcrh() argument
1769 return pl011_reg_to_offset(uap, REG_LCRH_RX) != in pl011_split_lcrh()
1770 pl011_reg_to_offset(uap, REG_LCRH_TX); in pl011_split_lcrh()
1773 static void pl011_write_lcr_h(struct uart_amba_port *uap, unsigned int lcr_h) in pl011_write_lcr_h() argument
1775 pl011_write(lcr_h, uap, REG_LCRH_RX); in pl011_write_lcr_h()
1776 if (pl011_split_lcrh(uap)) { in pl011_write_lcr_h()
1783 pl011_write(0xff, uap, REG_MIS); in pl011_write_lcr_h()
1784 pl011_write(lcr_h, uap, REG_LCRH_TX); in pl011_write_lcr_h()
1788 static int pl011_allocate_irq(struct uart_amba_port *uap) in pl011_allocate_irq() argument
1790 pl011_write(uap->im, uap, REG_IMSC); in pl011_allocate_irq()
1792 return request_irq(uap->port.irq, pl011_int, IRQF_SHARED, "uart-pl011", uap); in pl011_allocate_irq()
1800 static void pl011_enable_interrupts(struct uart_amba_port *uap) in pl011_enable_interrupts() argument
1805 spin_lock_irqsave(&uap->port.lock, flags); in pl011_enable_interrupts()
1808 pl011_write(UART011_RTIS | UART011_RXIS, uap, REG_ICR); in pl011_enable_interrupts()
1816 for (i = 0; i < uap->fifosize * 2; ++i) { in pl011_enable_interrupts()
1817 if (pl011_read(uap, REG_FR) & UART01x_FR_RXFE) in pl011_enable_interrupts()
1820 pl011_read(uap, REG_DR); in pl011_enable_interrupts()
1823 uap->im = UART011_RTIM; in pl011_enable_interrupts()
1824 if (!pl011_dma_rx_running(uap)) in pl011_enable_interrupts()
1825 uap->im |= UART011_RXIM; in pl011_enable_interrupts()
1826 pl011_write(uap->im, uap, REG_IMSC); in pl011_enable_interrupts()
1827 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_enable_interrupts()
1832 struct uart_amba_port *uap = container_of(port, struct uart_amba_port, port); in pl011_unthrottle_rx() local
1835 spin_lock_irqsave(&uap->port.lock, flags); in pl011_unthrottle_rx()
1837 uap->im = UART011_RTIM; in pl011_unthrottle_rx()
1838 if (!pl011_dma_rx_running(uap)) in pl011_unthrottle_rx()
1839 uap->im |= UART011_RXIM; in pl011_unthrottle_rx()
1841 pl011_write(uap->im, uap, REG_IMSC); in pl011_unthrottle_rx()
1843 spin_unlock_irqrestore(&uap->port.lock, flags); in pl011_unthrottle_rx()
1848 struct uart_amba_port *uap = in pl011_startup() local
1857 retval = pl011_allocate_irq(uap); in pl011_startup()
1861 pl011_write(uap->vendor->ifls, uap, REG_IFLS); in pl011_startup()
1863 spin_lock_irq(&uap->port.lock); in pl011_startup()
1865 cr = pl011_read(uap, REG_CR); in pl011_startup()
1872 pl011_write(cr, uap, REG_CR); in pl011_startup()
1874 spin_unlock_irq(&uap->port.lock); in pl011_startup()
1879 uap->old_status = pl011_read(uap, REG_FR) & UART01x_FR_MODEM_ANY; in pl011_startup()
1882 pl011_dma_startup(uap); in pl011_startup()
1884 pl011_enable_interrupts(uap); in pl011_startup()
1889 clk_disable_unprepare(uap->clk); in pl011_startup()
1895 struct uart_amba_port *uap = in sbsa_uart_startup() local
1903 retval = pl011_allocate_irq(uap); in sbsa_uart_startup()
1908 uap->old_status = 0; in sbsa_uart_startup()
1910 pl011_enable_interrupts(uap); in sbsa_uart_startup()
1915 static void pl011_shutdown_channel(struct uart_amba_port *uap, in pl011_shutdown_channel() argument
1920 val = pl011_read(uap, lcrh); in pl011_shutdown_channel()
1922 pl011_write(val, uap, lcrh); in pl011_shutdown_channel()
1930 static void pl011_disable_uart(struct uart_amba_port *uap) in pl011_disable_uart() argument
1934 uap->port.status &= ~(UPSTAT_AUTOCTS | UPSTAT_AUTORTS); in pl011_disable_uart()
1935 spin_lock_irq(&uap->port.lock); in pl011_disable_uart()
1936 cr = pl011_read(uap, REG_CR); in pl011_disable_uart()
1939 pl011_write(cr, uap, REG_CR); in pl011_disable_uart()
1940 spin_unlock_irq(&uap->port.lock); in pl011_disable_uart()
1945 pl011_shutdown_channel(uap, REG_LCRH_RX); in pl011_disable_uart()
1946 if (pl011_split_lcrh(uap)) in pl011_disable_uart()
1947 pl011_shutdown_channel(uap, REG_LCRH_TX); in pl011_disable_uart()
1950 static void pl011_disable_interrupts(struct uart_amba_port *uap) in pl011_disable_interrupts() argument
1952 spin_lock_irq(&uap->port.lock); in pl011_disable_interrupts()
1955 uap->im = 0; in pl011_disable_interrupts()
1956 pl011_write(uap->im, uap, REG_IMSC); in pl011_disable_interrupts()
1957 pl011_write(0xffff, uap, REG_ICR); in pl011_disable_interrupts()
1959 spin_unlock_irq(&uap->port.lock); in pl011_disable_interrupts()
1964 struct uart_amba_port *uap = in pl011_shutdown() local
1967 pl011_disable_interrupts(uap); in pl011_shutdown()
1969 pl011_dma_shutdown(uap); in pl011_shutdown()
1971 if ((port->rs485.flags & SER_RS485_ENABLED) && uap->rs485_tx_started) in pl011_shutdown()
1972 pl011_rs485_tx_stop(uap); in pl011_shutdown()
1974 free_irq(uap->port.irq, uap); in pl011_shutdown()
1976 pl011_disable_uart(uap); in pl011_shutdown()
1981 clk_disable_unprepare(uap->clk); in pl011_shutdown()
1985 if (dev_get_platdata(uap->port.dev)) { in pl011_shutdown()
1988 plat = dev_get_platdata(uap->port.dev); in pl011_shutdown()
1993 if (uap->port.ops->flush_buffer) in pl011_shutdown()
1994 uap->port.ops->flush_buffer(port); in pl011_shutdown()
1999 struct uart_amba_port *uap = in sbsa_uart_shutdown() local
2002 pl011_disable_interrupts(uap); in sbsa_uart_shutdown()
2004 free_irq(uap->port.irq, uap); in sbsa_uart_shutdown()
2006 if (uap->port.ops->flush_buffer) in sbsa_uart_shutdown()
2007 uap->port.ops->flush_buffer(port); in sbsa_uart_shutdown()
2046 struct uart_amba_port *uap = in pl011_set_termios() local
2053 if (uap->vendor->oversampling) in pl011_set_termios()
2067 if (uap->dmarx.auto_poll_rate) in pl011_set_termios()
2068 uap->dmarx.poll_rate = DIV_ROUND_UP(10000000, baud); in pl011_set_termios()
2099 if (uap->fifosize > 1) in pl011_set_termios()
2116 uap->rs485_tx_drain_interval = DIV_ROUND_UP(bits * 1000 * 1000, baud); in pl011_set_termios()
2126 old_cr = pl011_read(uap, REG_CR); in pl011_set_termios()
2139 if (uap->vendor->oversampling) { in pl011_set_termios()
2152 if (uap->vendor->oversampling) { in pl011_set_termios()
2159 pl011_write(quot & 0x3f, uap, REG_FBRD); in pl011_set_termios()
2160 pl011_write(quot >> 6, uap, REG_IBRD); in pl011_set_termios()
2168 pl011_write_lcr_h(uap, lcr_h); in pl011_set_termios()
2169 pl011_write(old_cr, uap, REG_CR); in pl011_set_termios()
2178 struct uart_amba_port *uap = in sbsa_uart_set_termios() local
2182 tty_termios_encode_baud_rate(termios, uap->fixed_baud, uap->fixed_baud); in sbsa_uart_set_termios()
2190 uart_update_timeout(port, CS8, uap->fixed_baud); in sbsa_uart_set_termios()
2197 struct uart_amba_port *uap = in pl011_type() local
2199 return uap->port.type == PORT_AMBA ? uap->type : NULL; in pl011_type()
2231 struct uart_amba_port *uap = in pl011_rs485_config() local
2235 pl011_rs485_tx_stop(uap); in pl011_rs485_config()
2239 u32 cr = pl011_read(uap, REG_CR); in pl011_rs485_config()
2242 pl011_write(cr, uap, REG_CR); in pl011_rs485_config()
2309 struct uart_amba_port *uap = in pl011_console_putchar() local
2312 while (pl011_read(uap, REG_FR) & UART01x_FR_TXFF) in pl011_console_putchar()
2314 pl011_write(ch, uap, REG_DR); in pl011_console_putchar()
2320 struct uart_amba_port *uap = amba_ports[co->index]; in pl011_console_write() local
2325 clk_enable(uap->clk); in pl011_console_write()
2328 if (uap->port.sysrq) in pl011_console_write()
2331 locked = spin_trylock(&uap->port.lock); in pl011_console_write()
2333 spin_lock(&uap->port.lock); in pl011_console_write()
2338 if (!uap->vendor->always_enabled) { in pl011_console_write()
2339 old_cr = pl011_read(uap, REG_CR); in pl011_console_write()
2342 pl011_write(new_cr, uap, REG_CR); in pl011_console_write()
2345 uart_console_write(&uap->port, s, count, pl011_console_putchar); in pl011_console_write()
2352 while ((pl011_read(uap, REG_FR) ^ uap->vendor->inv_fr) in pl011_console_write()
2353 & uap->vendor->fr_busy) in pl011_console_write()
2355 if (!uap->vendor->always_enabled) in pl011_console_write()
2356 pl011_write(old_cr, uap, REG_CR); in pl011_console_write()
2359 spin_unlock(&uap->port.lock); in pl011_console_write()
2362 clk_disable(uap->clk); in pl011_console_write()
2365 static void pl011_console_get_options(struct uart_amba_port *uap, int *baud, in pl011_console_get_options() argument
2368 if (pl011_read(uap, REG_CR) & UART01x_CR_UARTEN) { in pl011_console_get_options()
2371 lcr_h = pl011_read(uap, REG_LCRH_TX); in pl011_console_get_options()
2386 ibrd = pl011_read(uap, REG_IBRD); in pl011_console_get_options()
2387 fbrd = pl011_read(uap, REG_FBRD); in pl011_console_get_options()
2389 *baud = uap->port.uartclk * 4 / (64 * ibrd + fbrd); in pl011_console_get_options()
2391 if (uap->vendor->oversampling) { in pl011_console_get_options()
2392 if (pl011_read(uap, REG_CR) in pl011_console_get_options()
2401 struct uart_amba_port *uap; in pl011_console_setup() local
2415 uap = amba_ports[co->index]; in pl011_console_setup()
2416 if (!uap) in pl011_console_setup()
2420 pinctrl_pm_select_default_state(uap->port.dev); in pl011_console_setup()
2422 ret = clk_prepare(uap->clk); in pl011_console_setup()
2426 if (dev_get_platdata(uap->port.dev)) { in pl011_console_setup()
2429 plat = dev_get_platdata(uap->port.dev); in pl011_console_setup()
2434 uap->port.uartclk = clk_get_rate(uap->clk); in pl011_console_setup()
2436 if (uap->vendor->fixed_options) { in pl011_console_setup()
2437 baud = uap->fixed_baud; in pl011_console_setup()
2443 pl011_console_get_options(uap, &baud, &parity, &bits); in pl011_console_setup()
2446 return uart_set_options(&uap->port, co, baud, parity, bits, flow); in pl011_console_setup()
2684 static void pl011_unregister_port(struct uart_amba_port *uap) in pl011_unregister_port() argument
2690 if (amba_ports[i] == uap) in pl011_unregister_port()
2695 pl011_dma_remove(uap); in pl011_unregister_port()
2711 static int pl011_get_rs485_mode(struct uart_amba_port *uap) in pl011_get_rs485_mode() argument
2713 struct uart_port *port = &uap->port; in pl011_get_rs485_mode()
2723 static int pl011_setup_port(struct device *dev, struct uart_amba_port *uap, in pl011_setup_port() argument
2735 uap->port.dev = dev; in pl011_setup_port()
2736 uap->port.mapbase = mmiobase->start; in pl011_setup_port()
2737 uap->port.membase = base; in pl011_setup_port()
2738 uap->port.fifosize = uap->fifosize; in pl011_setup_port()
2739 uap->port.has_sysrq = IS_ENABLED(CONFIG_SERIAL_AMBA_PL011_CONSOLE); in pl011_setup_port()
2740 uap->port.flags = UPF_BOOT_AUTOCONF; in pl011_setup_port()
2741 uap->port.line = index; in pl011_setup_port()
2743 ret = pl011_get_rs485_mode(uap); in pl011_setup_port()
2747 amba_ports[index] = uap; in pl011_setup_port()
2752 static int pl011_register_port(struct uart_amba_port *uap) in pl011_register_port() argument
2757 pl011_write(0, uap, REG_IMSC); in pl011_register_port()
2758 pl011_write(0xffff, uap, REG_ICR); in pl011_register_port()
2763 dev_err(uap->port.dev, in pl011_register_port()
2766 if (amba_ports[i] == uap) in pl011_register_port()
2772 ret = uart_add_one_port(&amba_reg, &uap->port); in pl011_register_port()
2774 pl011_unregister_port(uap); in pl011_register_port()
2788 struct uart_amba_port *uap; in pl011_probe() local
2797 uap = devm_kzalloc(&dev->dev, sizeof(struct uart_amba_port), in pl011_probe()
2799 if (!uap) in pl011_probe()
2802 uap->clk = devm_clk_get(&dev->dev, NULL); in pl011_probe()
2803 if (IS_ERR(uap->clk)) in pl011_probe()
2804 return PTR_ERR(uap->clk); in pl011_probe()
2806 uap->reg_offset = vendor->reg_offset; in pl011_probe()
2807 uap->vendor = vendor; in pl011_probe()
2808 uap->fifosize = vendor->get_fifosize(dev); in pl011_probe()
2809 uap->port.iotype = vendor->access_32b ? UPIO_MEM32 : UPIO_MEM; in pl011_probe()
2810 uap->port.irq = dev->irq[0]; in pl011_probe()
2811 uap->port.ops = &amba_pl011_pops; in pl011_probe()
2812 uap->port.rs485_config = pl011_rs485_config; in pl011_probe()
2813 uap->port.rs485_supported = pl011_rs485_supported; in pl011_probe()
2814 snprintf(uap->type, sizeof(uap->type), "PL011 rev%u", amba_rev(dev)); in pl011_probe()
2819 uap->port.iotype = UPIO_MEM; in pl011_probe()
2822 uap->port.iotype = UPIO_MEM32; in pl011_probe()
2831 ret = pl011_setup_port(&dev->dev, uap, &dev->res, portnr); in pl011_probe()
2835 amba_set_drvdata(dev, uap); in pl011_probe()
2837 return pl011_register_port(uap); in pl011_probe()
2842 struct uart_amba_port *uap = amba_get_drvdata(dev); in pl011_remove() local
2844 uart_remove_one_port(&amba_reg, &uap->port); in pl011_remove()
2845 pl011_unregister_port(uap); in pl011_remove()
2851 struct uart_amba_port *uap = dev_get_drvdata(dev); in pl011_suspend() local
2853 if (!uap) in pl011_suspend()
2856 return uart_suspend_port(&amba_reg, &uap->port); in pl011_suspend()
2861 struct uart_amba_port *uap = dev_get_drvdata(dev); in pl011_resume() local
2863 if (!uap) in pl011_resume()
2866 return uart_resume_port(&amba_reg, &uap->port); in pl011_resume()
2874 struct uart_amba_port *uap; in sbsa_uart_probe() local
2897 uap = devm_kzalloc(&pdev->dev, sizeof(struct uart_amba_port), in sbsa_uart_probe()
2899 if (!uap) in sbsa_uart_probe()
2905 uap->port.irq = ret; in sbsa_uart_probe()
2910 uap->vendor = &vendor_qdt_qdf2400_e44; in sbsa_uart_probe()
2913 uap->vendor = &vendor_sbsa; in sbsa_uart_probe()
2915 uap->reg_offset = uap->vendor->reg_offset; in sbsa_uart_probe()
2916 uap->fifosize = 32; in sbsa_uart_probe()
2917 uap->port.iotype = uap->vendor->access_32b ? UPIO_MEM32 : UPIO_MEM; in sbsa_uart_probe()
2918 uap->port.ops = &sbsa_uart_pops; in sbsa_uart_probe()
2919 uap->fixed_baud = baudrate; in sbsa_uart_probe()
2921 snprintf(uap->type, sizeof(uap->type), "SBSA"); in sbsa_uart_probe()
2925 ret = pl011_setup_port(&pdev->dev, uap, r, portnr); in sbsa_uart_probe()
2929 platform_set_drvdata(pdev, uap); in sbsa_uart_probe()
2931 return pl011_register_port(uap); in sbsa_uart_probe()
2936 struct uart_amba_port *uap = platform_get_drvdata(pdev); in sbsa_uart_remove() local
2938 uart_remove_one_port(&amba_reg, &uap->port); in sbsa_uart_remove()
2939 pl011_unregister_port(uap); in sbsa_uart_remove()