Lines matching refs:sport — references to the struct imx_port *sport instance in the i.MX UART serial driver. The number at the start of each line is its position in the source file; only lines containing "sport" are shown, so enclosing function signatures that take a plain struct uart_port *port are omitted.

271 static inline void imx_uart_writel(struct imx_port *sport, u32 val, u32 offset)
273 writel(val, sport->port.membase + offset);
276 static inline u32 imx_uart_readl(struct imx_port *sport, u32 offset)
278 return readl(sport->port.membase + offset);
281 static inline unsigned imx_uart_uts_reg(struct imx_port *sport)
283 return sport->devdata->uts_reg;
286 static inline int imx_uart_is_imx1(struct imx_port *sport)
288 return sport->devdata->devtype == IMX1_UART;
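
These accessors funnel every register access through sport, which makes the driver's pervasive read-modify-write idiom (read a control register, flip bits, write it back) uniform. A minimal user-space sketch of that idiom follows; the mock_port type and regs[] array are stand-ins for the kernel's MMIO mapping, not the driver's real types, though the UCR1 offset and UCR1_TRDYEN bit mirror the hardware register map.

#include <stdint.h>
#include <stdio.h>

#define UCR1        0x80        /* byte offset into the register block */
#define UCR1_TRDYEN (1 << 13)   /* transmitter-ready interrupt enable */

struct mock_port { uint32_t regs[0x40]; }; /* 0x100 bytes of fake register space */

static inline void mock_writel(struct mock_port *sport, uint32_t val, uint32_t offset)
{
	sport->regs[offset / 4] = val;
}

static inline uint32_t mock_readl(struct mock_port *sport, uint32_t offset)
{
	return sport->regs[offset / 4];
}

int main(void)
{
	struct mock_port port = { { 0 } };
	/* the read-modify-write pattern used throughout the listing */
	uint32_t ucr1 = mock_readl(&port, UCR1);
	mock_writel(&port, ucr1 | UCR1_TRDYEN, UCR1);
	printf("UCR1 = 0x%08x\n", mock_readl(&port, UCR1));
	return 0;
}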
295 static void imx_uart_ucrs_save(struct imx_port *sport,
299 ucr->ucr1 = imx_uart_readl(sport, UCR1);
300 ucr->ucr2 = imx_uart_readl(sport, UCR2);
301 ucr->ucr3 = imx_uart_readl(sport, UCR3);
304 static void imx_uart_ucrs_restore(struct imx_port *sport,
308 imx_uart_writel(sport, ucr->ucr1, UCR1);
309 imx_uart_writel(sport, ucr->ucr2, UCR2);
310 imx_uart_writel(sport, ucr->ucr3, UCR3);
315 static void imx_uart_rts_active(struct imx_port *sport, u32 *ucr2)
319 mctrl_gpio_set(sport->gpios, sport->port.mctrl | TIOCM_RTS);
323 static void imx_uart_rts_inactive(struct imx_port *sport, u32 *ucr2)
328 mctrl_gpio_set(sport->gpios, sport->port.mctrl & ~TIOCM_RTS);
337 static void imx_uart_soft_reset(struct imx_port *sport)
353 ubir = imx_uart_readl(sport, UBIR);
354 ubmr = imx_uart_readl(sport, UBMR);
355 uts = imx_uart_readl(sport, IMX21_UTS);
357 ucr2 = imx_uart_readl(sport, UCR2);
358 imx_uart_writel(sport, ucr2 & ~UCR2_SRST, UCR2);
360 while (!(imx_uart_readl(sport, UCR2) & UCR2_SRST) && (--i > 0))
364 imx_uart_writel(sport, ubir, UBIR);
365 imx_uart_writel(sport, ubmr, UBMR);
366 imx_uart_writel(sport, uts, IMX21_UTS);
368 sport->idle_counter = 0;
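
imx_uart_soft_reset() shows a bounded-poll reset: save the baud registers the reset will wipe (UBIR, UBMR, UTS), clear UCR2_SRST to trigger the reset, spin until the hardware sets SRST again or a retry budget runs out, then restore. A user-space sketch of that shape, with simulated hardware standing in for the register reads; the demo register values are arbitrary.

#include <stdint.h>
#include <stdio.h>

#define UCR2_SRST (1 << 0)

static uint32_t sim_ucr2 = UCR2_SRST;
static int sim_reset_cycles;

static uint32_t read_ucr2(void)
{
	/* pretend the UART block finishes its reset after three polls */
	if (!(sim_ucr2 & UCR2_SRST) && ++sim_reset_cycles >= 3)
		sim_ucr2 |= UCR2_SRST;
	return sim_ucr2;
}

int main(void)
{
	uint32_t saved_ubir = 0x000f, saved_ubmr = 0x0047; /* arbitrary demo values */
	int i = 10; /* same retry budget as the driver */

	sim_ucr2 &= ~UCR2_SRST;                       /* kick off the reset */
	while (!(read_ucr2() & UCR2_SRST) && (--i > 0))
		;                                     /* the driver does udelay(1) here */

	printf("reset %s, restoring UBIR=0x%04x UBMR=0x%04x\n",
	       i ? "done" : "timed out", saved_ubir, saved_ubmr);
	return 0;
}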
371 static void imx_uart_disable_loopback_rs485(struct imx_port *sport)
376 uts = imx_uart_readl(sport, imx_uart_uts_reg(sport));
378 imx_uart_writel(sport, uts, imx_uart_uts_reg(sport));
384 struct imx_port *sport = (struct imx_port *)port;
387 ucr1 = imx_uart_readl(sport, UCR1);
388 ucr2 = imx_uart_readl(sport, UCR2);
392 if (sport->dma_is_enabled) {
400 imx_uart_writel(sport, ucr2, UCR2);
401 imx_uart_writel(sport, ucr1, UCR1);
402 imx_uart_disable_loopback_rs485(sport);
408 struct imx_port *sport = (struct imx_port *)port;
411 if (sport->tx_state == OFF)
418 if (sport->dma_is_txing)
421 ucr1 = imx_uart_readl(sport, UCR1);
422 imx_uart_writel(sport, ucr1 & ~UCR1_TRDYEN, UCR1);
424 ucr4 = imx_uart_readl(sport, UCR4);
425 usr2 = imx_uart_readl(sport, USR2);
432 imx_uart_writel(sport, ucr4, UCR4);
436 if (sport->tx_state == SEND) {
437 sport->tx_state = WAIT_AFTER_SEND;
440 start_hrtimer_ms(&sport->trigger_stop_tx,
448 if (sport->tx_state == WAIT_AFTER_RTS ||
449 sport->tx_state == WAIT_AFTER_SEND) {
452 hrtimer_try_to_cancel(&sport->trigger_start_tx);
454 ucr2 = imx_uart_readl(sport, UCR2);
456 imx_uart_rts_active(sport, &ucr2);
458 imx_uart_rts_inactive(sport, &ucr2);
459 imx_uart_writel(sport, ucr2, UCR2);
464 sport->tx_state = OFF;
467 sport->tx_state = OFF;
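
The tx_state values tested in stop_tx and start_tx form a small RS485 turnaround state machine (OFF, WAIT_AFTER_RTS, SEND, WAIT_AFTER_SEND), with the two hrtimers enforcing the delay_rts_before_send / delay_rts_after_send gaps. A condensed sketch of the transitions; the enum names match the driver, but the walk-through below is illustrative only and elides the actual timers.

#include <stdio.h>

enum tx_state { OFF, WAIT_AFTER_RTS, SEND, WAIT_AFTER_SEND };

static const char *name(enum tx_state s)
{
	static const char *n[] = { "OFF", "WAIT_AFTER_RTS", "SEND", "WAIT_AFTER_SEND" };
	return n[s];
}

int main(void)
{
	enum tx_state s = OFF;
	/* start_tx: assert RTS, honor the pre-send delay, then transmit */
	s = WAIT_AFTER_RTS;  printf("-> %s (hrtimer: delay_rts_before_send)\n", name(s));
	s = SEND;            printf("-> %s (shifter active)\n", name(s));
	/* stop_tx: hold RTS for the post-send delay before releasing it */
	s = WAIT_AFTER_SEND; printf("-> %s (hrtimer: delay_rts_after_send)\n", name(s));
	s = OFF;             printf("-> %s (RTS released)\n", name(s));
	return 0;
}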
474 struct imx_port *sport = (struct imx_port *)port;
477 ucr1 = imx_uart_readl(sport, UCR1);
478 ucr2 = imx_uart_readl(sport, UCR2);
479 ucr4 = imx_uart_readl(sport, UCR4);
481 if (sport->dma_is_enabled) {
488 imx_uart_writel(sport, ucr1, UCR1);
489 imx_uart_writel(sport, ucr4, UCR4);
494 sport->have_rtscts && !sport->have_rtsgpio) {
495 uts = imx_uart_readl(sport, imx_uart_uts_reg(sport));
497 imx_uart_writel(sport, uts, imx_uart_uts_reg(sport));
503 imx_uart_writel(sport, ucr2, UCR2);
509 struct imx_port *sport = (struct imx_port *)port;
511 mod_timer(&sport->timer, jiffies);
513 mctrl_gpio_enable_ms(sport->gpios);
516 static void imx_uart_dma_tx(struct imx_port *sport);
519 static inline void imx_uart_transmit_buffer(struct imx_port *sport)
521 struct circ_buf *xmit = &sport->port.state->xmit;
523 if (sport->port.x_char) {
525 imx_uart_writel(sport, sport->port.x_char, URTX0);
526 sport->port.icount.tx++;
527 sport->port.x_char = 0;
531 if (uart_circ_empty(xmit) || uart_tx_stopped(&sport->port)) {
532 imx_uart_stop_tx(&sport->port);
536 if (sport->dma_is_enabled) {
542 ucr1 = imx_uart_readl(sport, UCR1);
544 if (sport->dma_is_txing) {
546 imx_uart_writel(sport, ucr1, UCR1);
548 imx_uart_writel(sport, ucr1, UCR1);
549 imx_uart_dma_tx(sport);
556 !(imx_uart_readl(sport, imx_uart_uts_reg(sport)) & UTS_TXFULL)) {
559 imx_uart_writel(sport, xmit->buf[xmit->tail], URTX0);
560 uart_xmit_advance(&sport->port, 1);
564 uart_write_wakeup(&sport->port);
567 imx_uart_stop_tx(&sport->port);
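
The PIO path in imx_uart_transmit_buffer() drains the circular transmit buffer into the TX FIFO until the FIFO reports full (UTS_TXFULL), then returns and lets the next TRDY interrupt continue. A sketch of that fill loop with a simulated ring and FIFO; the sizes below are made up for the example.

#include <stdio.h>

#define RING_SIZE  8
#define FIFO_DEPTH 4

int main(void)
{
	char ring[RING_SIZE] = "hello";
	int head = 5, tail = 0;   /* five pending bytes */
	int fifo_used = 0;

	while (tail != head && fifo_used < FIFO_DEPTH) {
		putchar(ring[tail]);              /* stands in for writel(..., URTX0) */
		tail = (tail + 1) % RING_SIZE;    /* uart_xmit_advance(&port, 1) */
		fifo_used++;
	}
	printf("\n%d byte(s) left for the next TRDY interrupt\n", head - tail);
	return 0;
}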
572 struct imx_port *sport = data;
573 struct scatterlist *sgl = &sport->tx_sgl[0];
574 struct circ_buf *xmit = &sport->port.state->xmit;
578 spin_lock_irqsave(&sport->port.lock, flags);
580 dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE);
582 ucr1 = imx_uart_readl(sport, UCR1);
584 imx_uart_writel(sport, ucr1, UCR1);
586 uart_xmit_advance(&sport->port, sport->tx_bytes);
588 dev_dbg(sport->port.dev, "we finish the TX DMA.\n");
590 sport->dma_is_txing = 0;
593 uart_write_wakeup(&sport->port);
595 if (!uart_circ_empty(xmit) && !uart_tx_stopped(&sport->port))
596 imx_uart_dma_tx(sport);
597 else if (sport->port.rs485.flags & SER_RS485_ENABLED) {
598 u32 ucr4 = imx_uart_readl(sport, UCR4);
600 imx_uart_writel(sport, ucr4, UCR4);
603 spin_unlock_irqrestore(&sport->port.lock, flags);
607 static void imx_uart_dma_tx(struct imx_port *sport)
609 struct circ_buf *xmit = &sport->port.state->xmit;
610 struct scatterlist *sgl = sport->tx_sgl;
612 struct dma_chan *chan = sport->dma_chan_tx;
613 struct device *dev = sport->port.dev;
617 if (sport->dma_is_txing)
620 ucr4 = imx_uart_readl(sport, UCR4);
622 imx_uart_writel(sport, ucr4, UCR4);
624 sport->tx_bytes = uart_circ_chars_pending(xmit);
627 sport->dma_tx_nents = 1;
628 sg_init_one(sgl, xmit->buf + xmit->tail, sport->tx_bytes);
630 sport->dma_tx_nents = 2;
637 ret = dma_map_sg(dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE);
645 dma_unmap_sg(dev, sgl, sport->dma_tx_nents,
651 desc->callback_param = sport;
656 ucr1 = imx_uart_readl(sport, UCR1);
658 imx_uart_writel(sport, ucr1, UCR1);
661 sport->dma_is_txing = 1;
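
The dma_tx_nents choice in imx_uart_dma_tx() handles circular-buffer wraparound: one scatterlist entry when the pending bytes are contiguous, two when they wrap past the end of the ring. A sketch of that split with plain structs in place of struct scatterlist; split_pending() is a hypothetical helper for the example.

#include <stdio.h>

#define RING_SIZE 16

struct seg { int offset, len; };

static int split_pending(int head, int tail, struct seg out[2])
{
	if (head >= tail) {                               /* contiguous: one segment */
		out[0] = (struct seg){ tail, head - tail };
		return 1;
	}
	out[0] = (struct seg){ tail, RING_SIZE - tail };  /* tail .. end of buffer */
	out[1] = (struct seg){ 0, head };                 /* start .. head */
	return 2;
}

int main(void)
{
	struct seg s[2];
	int n = split_pending(/*head=*/3, /*tail=*/12, s);
	for (int i = 0; i < n; i++)
		printf("sg[%d]: offset=%d len=%d\n", i, s[i].offset, s[i].len);
	return 0;
}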
670 struct imx_port *sport = (struct imx_port *)port;
673 if (!sport->port.x_char && uart_circ_empty(&port->state->xmit))
677 * We cannot simply do nothing here if sport->tx_state == SEND already
683 if (sport->tx_state == OFF) {
684 u32 ucr2 = imx_uart_readl(sport, UCR2);
686 imx_uart_rts_active(sport, &ucr2);
688 imx_uart_rts_inactive(sport, &ucr2);
689 imx_uart_writel(sport, ucr2, UCR2);
695 sport->tx_state = WAIT_AFTER_RTS;
698 start_hrtimer_ms(&sport->trigger_start_tx,
706 if (sport->tx_state == WAIT_AFTER_SEND
707 || sport->tx_state == WAIT_AFTER_RTS) {
709 hrtimer_try_to_cancel(&sport->trigger_stop_tx);
716 if (!sport->dma_is_enabled) {
717 u32 ucr4 = imx_uart_readl(sport, UCR4);
719 imx_uart_writel(sport, ucr4, UCR4);
722 sport->tx_state = SEND;
725 sport->tx_state = SEND;
728 if (!sport->dma_is_enabled) {
729 ucr1 = imx_uart_readl(sport, UCR1);
730 imx_uart_writel(sport, ucr1 | UCR1_TRDYEN, UCR1);
733 if (sport->dma_is_enabled) {
734 if (sport->port.x_char) {
737 ucr1 = imx_uart_readl(sport, UCR1);
740 imx_uart_writel(sport, ucr1, UCR1);
746 imx_uart_dma_tx(sport);
753 struct imx_port *sport = dev_id;
756 imx_uart_writel(sport, USR1_RTSD, USR1);
757 usr1 = imx_uart_readl(sport, USR1) & USR1_RTSS;
758 uart_handle_cts_change(&sport->port, usr1);
759 wake_up_interruptible(&sport->port.state->port.delta_msr_wait);
766 struct imx_port *sport = dev_id;
769 spin_lock(&sport->port.lock);
773 spin_unlock(&sport->port.lock);
780 struct imx_port *sport = dev_id;
782 spin_lock(&sport->port.lock);
783 imx_uart_transmit_buffer(sport);
784 spin_unlock(&sport->port.lock);
800 static void imx_uart_check_flood(struct imx_port *sport, u32 usr2)
826 imx_uart_writel(sport, USR2_WAKE, USR2);
827 sport->idle_counter = 0;
828 } else if (++sport->idle_counter > 3) {
829 dev_warn(sport->port.dev, "RX flood detected: soft reset.");
830 imx_uart_soft_reset(sport); /* also clears 'sport->idle_counter' */
836 struct imx_port *sport = dev_id;
837 struct tty_port *port = &sport->port.state->port;
841 usr2 = imx_uart_readl(sport, USR2);
843 imx_uart_check_flood(sport, usr2);
845 while ((rx = imx_uart_readl(sport, URXD0)) & URXD_CHARRDY) {
847 sport->port.icount.rx++;
851 sport->port.icount.brk++;
852 if (uart_handle_break(&sport->port))
856 sport->port.icount.parity++;
858 sport->port.icount.frame++;
860 sport->port.icount.overrun++;
862 if (rx & sport->port.ignore_status_mask)
865 rx &= (sport->port.read_status_mask | 0xFF);
876 sport->port.sysrq = 0;
877 } else if (uart_handle_sysrq_char(&sport->port, (unsigned char)rx)) {
881 if (sport->port.ignore_status_mask & URXD_DUMMY_READ)
885 sport->port.icount.buf_overrun++;
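
The RX loop above classifies each received word by its URXD error bits, then filters it through ignore_status_mask (drop silently) and read_status_mask (report to the tty layer). A sketch of that two-mask filtering; the bit positions mirror the driver's URXD_* defines, while the mask policy chosen below is just an example.

#include <stdio.h>
#include <stdint.h>

#define URXD_CHARRDY (1 << 15)
#define URXD_ERR     (1 << 14)
#define URXD_OVRRUN  (1 << 13)
#define URXD_FRMERR  (1 << 12)
#define URXD_BRK     (1 << 11)
#define URXD_PRERR   (1 << 10)

int main(void)
{
	uint32_t ignore_mask = URXD_OVRRUN;               /* e.g. an IGNPAR-style policy */
	uint32_t read_mask   = URXD_FRMERR | URXD_PRERR;  /* flags reported to the tty */
	uint32_t words[] = {
		URXD_CHARRDY | 'A',                           /* clean byte */
		URXD_CHARRDY | URXD_ERR | URXD_OVRRUN | 'B',  /* overrun: ignored */
	};

	for (unsigned i = 0; i < 2; i++) {
		uint32_t rx = words[i];
		if (rx & ignore_mask) {
			printf("'%c' dropped (ignored status)\n", (int)(rx & 0xFF));
			continue;
		}
		rx &= read_mask | 0xFF;                       /* keep data + reported flags */
		printf("'%c' delivered, flags=0x%x\n", (int)(rx & 0xFF), (unsigned)(rx & ~0xFFu));
	}
	return 0;
}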
895 struct imx_port *sport = dev_id;
898 spin_lock(&sport->port.lock);
902 spin_unlock(&sport->port.lock);
907 static void imx_uart_clear_rx_errors(struct imx_port *sport);
912 static unsigned int imx_uart_get_hwmctrl(struct imx_port *sport)
915 unsigned usr1 = imx_uart_readl(sport, USR1);
916 unsigned usr2 = imx_uart_readl(sport, USR2);
925 if (sport->dte_mode)
926 if (!(imx_uart_readl(sport, USR2) & USR2_RIIN))
935 static void imx_uart_mctrl_check(struct imx_port *sport)
939 status = imx_uart_get_hwmctrl(sport);
940 changed = status ^ sport->old_status;
945 sport->old_status = status;
948 sport->port.icount.rng++;
950 sport->port.icount.dsr++;
952 uart_handle_dcd_change(&sport->port, status & TIOCM_CAR);
954 uart_handle_cts_change(&sport->port, status & TIOCM_CTS);
956 wake_up_interruptible(&sport->port.state->port.delta_msr_wait);
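
imx_uart_mctrl_check() finds modem-line edges by XOR-ing the current line state against the previously seen one (status ^ sport->old_status): each set bit in changed is a line that toggled since the last check. A minimal demonstration of that edge detection, using the standard TIOCM_* bit values.

#include <stdio.h>

#define TIOCM_CTS 0x020
#define TIOCM_CAR 0x040
#define TIOCM_DSR 0x100

int main(void)
{
	unsigned old_status = TIOCM_CTS;     /* CTS was asserted */
	unsigned status     = TIOCM_CAR;     /* now DCD asserted, CTS dropped */
	unsigned changed    = status ^ old_status;

	if (changed & TIOCM_CAR)
		printf("DCD changed, now %s\n", (status & TIOCM_CAR) ? "on" : "off");
	if (changed & TIOCM_CTS)
		printf("CTS changed, now %s\n", (status & TIOCM_CTS) ? "on" : "off");
	if (changed & TIOCM_DSR)
		printf("DSR changed\n");
	return 0;
}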
961 struct imx_port *sport = dev_id;
965 spin_lock(&sport->port.lock);
967 usr1 = imx_uart_readl(sport, USR1);
968 usr2 = imx_uart_readl(sport, USR2);
969 ucr1 = imx_uart_readl(sport, UCR1);
970 ucr2 = imx_uart_readl(sport, UCR2);
971 ucr3 = imx_uart_readl(sport, UCR3);
972 ucr4 = imx_uart_readl(sport, UCR4);
1000 imx_uart_writel(sport, USR1_AGTIM, USR1);
1007 imx_uart_transmit_buffer(sport);
1012 imx_uart_writel(sport, USR1_DTRD, USR1);
1014 imx_uart_mctrl_check(sport);
1025 imx_uart_writel(sport, USR1_AWAKE, USR1);
1030 sport->port.icount.overrun++;
1031 imx_uart_writel(sport, USR2_ORE, USR2);
1035 spin_unlock(&sport->port.lock);
1045 struct imx_port *sport = (struct imx_port *)port;
1048 ret = (imx_uart_readl(sport, USR2) & USR2_TXDC) ? TIOCSER_TEMT : 0;
1051 if (sport->dma_is_txing)
1060 struct imx_port *sport = (struct imx_port *)port;
1061 unsigned int ret = imx_uart_get_hwmctrl(sport);
1063 mctrl_gpio_get(sport->gpios, &ret);
1071 struct imx_port *sport = (struct imx_port *)port;
1081 ucr2 = imx_uart_readl(sport, UCR2);
1093 imx_uart_writel(sport, ucr2, UCR2);
1096 ucr3 = imx_uart_readl(sport, UCR3) & ~UCR3_DSR;
1099 imx_uart_writel(sport, ucr3, UCR3);
1101 uts = imx_uart_readl(sport, imx_uart_uts_reg(sport)) & ~UTS_LOOP;
1104 imx_uart_writel(sport, uts, imx_uart_uts_reg(sport));
1106 mctrl_gpio_set(sport->gpios, mctrl);
1114 struct imx_port *sport = (struct imx_port *)port;
1118 spin_lock_irqsave(&sport->port.lock, flags);
1120 ucr1 = imx_uart_readl(sport, UCR1) & ~UCR1_SNDBRK;
1125 imx_uart_writel(sport, ucr1, UCR1);
1127 spin_unlock_irqrestore(&sport->port.lock, flags);
1136 struct imx_port *sport = from_timer(sport, t, timer);
1139 if (sport->port.state) {
1140 spin_lock_irqsave(&sport->port.lock, flags);
1141 imx_uart_mctrl_check(sport);
1142 spin_unlock_irqrestore(&sport->port.lock, flags);
1144 mod_timer(&sport->timer, jiffies + MCTRL_TIMEOUT);
1158 struct imx_port *sport = data;
1159 struct dma_chan *chan = sport->dma_chan_rx;
1160 struct scatterlist *sgl = &sport->rx_sgl;
1161 struct tty_port *port = &sport->port.state->port;
1163 struct circ_buf *rx_ring = &sport->rx_ring;
1169 status = dmaengine_tx_status(chan, sport->rx_cookie, &state);
1172 spin_lock(&sport->port.lock);
1173 imx_uart_clear_rx_errors(sport);
1174 spin_unlock(&sport->port.lock);
1193 bd_size = sg_dma_len(sgl) / sport->rx_periods;
1203 spin_lock(&sport->port.lock);
1204 imx_uart_check_flood(sport, imx_uart_readl(sport, USR2));
1205 spin_unlock(&sport->port.lock);
1207 if (!(sport->port.ignore_status_mask & URXD_DUMMY_READ)) {
1210 dma_sync_sg_for_cpu(sport->port.dev, sgl, 1,
1214 sport->rx_buf + rx_ring->tail, r_bytes);
1217 dma_sync_sg_for_device(sport->port.dev, sgl, 1,
1221 sport->port.icount.buf_overrun++;
1223 sport->port.icount.rx += w_bytes;
1232 dev_dbg(sport->port.dev, "We get %d bytes.\n", w_bytes);
1236 static int imx_uart_start_rx_dma(struct imx_port *sport)
1238 struct scatterlist *sgl = &sport->rx_sgl;
1239 struct dma_chan *chan = sport->dma_chan_rx;
1240 struct device *dev = sport->port.dev;
1244 sport->rx_ring.head = 0;
1245 sport->rx_ring.tail = 0;
1247 sg_init_one(sgl, sport->rx_buf, sport->rx_buf_size);
1255 sg_dma_len(sgl), sg_dma_len(sgl) / sport->rx_periods,
1264 desc->callback_param = sport;
1267 sport->dma_is_rxing = 1;
1268 sport->rx_cookie = dmaengine_submit(desc);
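
imx_uart_start_rx_dma() maps one buffer and asks the engine to cycle through it in rx_periods slices; the completion callback then drains whole periods into the tty, wrapping at the buffer end. A sketch of the period arithmetic; the sizing constants below follow the driver's defaults as I recall them (overridable via device tree) and should be treated as illustrative.

#include <stdio.h>

#define RX_DMA_PERIODS    16     /* assumed default period count */
#define RX_DMA_PERIOD_LEN 0x400  /* assumed default bytes per period */

int main(void)
{
	unsigned buf_size = RX_DMA_PERIOD_LEN * RX_DMA_PERIODS;
	unsigned bd_size  = buf_size / RX_DMA_PERIODS;  /* bytes per callback */

	printf("ring buffer: %u bytes, %u periods of %u bytes\n",
	       buf_size, RX_DMA_PERIODS, bd_size);

	/* head advances one period per DMA callback, wrapping at buf_size */
	unsigned head = 0;
	for (int cb = 0; cb < 3; cb++) {
		head = (head + bd_size) % buf_size;
		printf("after callback %d: head=%u\n", cb + 1, head);
	}
	return 0;
}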
1273 static void imx_uart_clear_rx_errors(struct imx_port *sport)
1275 struct tty_port *port = &sport->port.state->port;
1278 usr1 = imx_uart_readl(sport, USR1);
1279 usr2 = imx_uart_readl(sport, USR2);
1282 sport->port.icount.brk++;
1283 imx_uart_writel(sport, USR2_BRCD, USR2);
1284 uart_handle_break(&sport->port);
1286 sport->port.icount.buf_overrun++;
1290 sport->port.icount.frame++;
1291 imx_uart_writel(sport, USR1_FRAMERR, USR1);
1293 sport->port.icount.parity++;
1294 imx_uart_writel(sport, USR1_PARITYERR, USR1);
1299 sport->port.icount.overrun++;
1300 imx_uart_writel(sport, USR2_ORE, USR2);
1303 sport->idle_counter = 0;
1312 static void imx_uart_setup_ufcr(struct imx_port *sport,
1318 val = imx_uart_readl(sport, UFCR) & (UFCR_RFDIV | UFCR_DCEDTE);
1320 imx_uart_writel(sport, val, UFCR);
1323 static void imx_uart_dma_exit(struct imx_port *sport)
1325 if (sport->dma_chan_rx) {
1326 dmaengine_terminate_sync(sport->dma_chan_rx);
1327 dma_release_channel(sport->dma_chan_rx);
1328 sport->dma_chan_rx = NULL;
1329 sport->rx_cookie = -EINVAL;
1330 kfree(sport->rx_buf);
1331 sport->rx_buf = NULL;
1334 if (sport->dma_chan_tx) {
1335 dmaengine_terminate_sync(sport->dma_chan_tx);
1336 dma_release_channel(sport->dma_chan_tx);
1337 sport->dma_chan_tx = NULL;
1341 static int imx_uart_dma_init(struct imx_port *sport)
1344 struct device *dev = sport->port.dev;
1348 sport->dma_chan_rx = dma_request_slave_channel(dev, "rx");
1349 if (!sport->dma_chan_rx) {
1356 slave_config.src_addr = sport->port.mapbase + URXD0;
1360 ret = dmaengine_slave_config(sport->dma_chan_rx, &slave_config);
1366 sport->rx_buf_size = sport->rx_period_length * sport->rx_periods;
1367 sport->rx_buf = kzalloc(sport->rx_buf_size, GFP_KERNEL);
1368 if (!sport->rx_buf) {
1372 sport->rx_ring.buf = sport->rx_buf;
1375 sport->dma_chan_tx = dma_request_slave_channel(dev, "tx");
1376 if (!sport->dma_chan_tx) {
1383 slave_config.dst_addr = sport->port.mapbase + URTX0;
1386 ret = dmaengine_slave_config(sport->dma_chan_tx, &slave_config);
1394 imx_uart_dma_exit(sport);
1398 static void imx_uart_enable_dma(struct imx_port *sport)
1402 imx_uart_setup_ufcr(sport, TXTL_DMA, RXTL_DMA);
1405 ucr1 = imx_uart_readl(sport, UCR1);
1407 imx_uart_writel(sport, ucr1, UCR1);
1409 sport->dma_is_enabled = 1;
1412 static void imx_uart_disable_dma(struct imx_port *sport)
1417 ucr1 = imx_uart_readl(sport, UCR1);
1419 imx_uart_writel(sport, ucr1, UCR1);
1421 imx_uart_setup_ufcr(sport, TXTL_DEFAULT, RXTL_DEFAULT);
1423 sport->dma_is_enabled = 0;
1431 struct imx_port *sport = (struct imx_port *)port;
1437 retval = clk_prepare_enable(sport->clk_per);
1440 retval = clk_prepare_enable(sport->clk_ipg);
1442 clk_disable_unprepare(sport->clk_per);
1446 imx_uart_setup_ufcr(sport, TXTL_DEFAULT, RXTL_DEFAULT);
1451 ucr4 = imx_uart_readl(sport, UCR4);
1457 imx_uart_writel(sport, ucr4 & ~UCR4_DREN, UCR4);
1460 if (!uart_console(port) && imx_uart_dma_init(sport) == 0)
1463 spin_lock_irqsave(&sport->port.lock, flags);
1466 imx_uart_soft_reset(sport);
1471 imx_uart_writel(sport, USR1_RTSD | USR1_DTRD, USR1);
1472 imx_uart_writel(sport, USR2_ORE, USR2);
1474 ucr1 = imx_uart_readl(sport, UCR1) & ~UCR1_RRDYEN;
1476 if (sport->have_rtscts)
1479 imx_uart_writel(sport, ucr1, UCR1);
1481 ucr4 = imx_uart_readl(sport, UCR4) & ~(UCR4_OREN | UCR4_INVR);
1484 if (sport->inverted_rx)
1486 imx_uart_writel(sport, ucr4, UCR4);
1488 ucr3 = imx_uart_readl(sport, UCR3) & ~UCR3_INVT;
1492 if (sport->inverted_tx)
1495 if (!imx_uart_is_imx1(sport)) {
1498 if (sport->dte_mode)
1502 imx_uart_writel(sport, ucr3, UCR3);
1504 ucr2 = imx_uart_readl(sport, UCR2) & ~UCR2_ATEN;
1506 if (!sport->have_rtscts)
1512 if (!imx_uart_is_imx1(sport))
1514 imx_uart_writel(sport, ucr2, UCR2);
1519 imx_uart_enable_ms(&sport->port);
1522 imx_uart_enable_dma(sport);
1523 imx_uart_start_rx_dma(sport);
1525 ucr1 = imx_uart_readl(sport, UCR1);
1527 imx_uart_writel(sport, ucr1, UCR1);
1529 ucr2 = imx_uart_readl(sport, UCR2);
1531 imx_uart_writel(sport, ucr2, UCR2);
1534 imx_uart_disable_loopback_rs485(sport);
1536 spin_unlock_irqrestore(&sport->port.lock, flags);
1543 struct imx_port *sport = (struct imx_port *)port;
1547 if (sport->dma_is_enabled) {
1548 dmaengine_terminate_sync(sport->dma_chan_tx);
1549 if (sport->dma_is_txing) {
1550 dma_unmap_sg(sport->port.dev, &sport->tx_sgl[0],
1551 sport->dma_tx_nents, DMA_TO_DEVICE);
1552 sport->dma_is_txing = 0;
1554 dmaengine_terminate_sync(sport->dma_chan_rx);
1555 if (sport->dma_is_rxing) {
1556 dma_unmap_sg(sport->port.dev, &sport->rx_sgl,
1558 sport->dma_is_rxing = 0;
1561 spin_lock_irqsave(&sport->port.lock, flags);
1564 imx_uart_disable_dma(sport);
1565 spin_unlock_irqrestore(&sport->port.lock, flags);
1566 imx_uart_dma_exit(sport);
1569 mctrl_gpio_disable_ms(sport->gpios);
1571 spin_lock_irqsave(&sport->port.lock, flags);
1572 ucr2 = imx_uart_readl(sport, UCR2);
1574 imx_uart_writel(sport, ucr2, UCR2);
1575 spin_unlock_irqrestore(&sport->port.lock, flags);
1580 del_timer_sync(&sport->timer);
1586 spin_lock_irqsave(&sport->port.lock, flags);
1588 ucr1 = imx_uart_readl(sport, UCR1);
1594 sport->have_rtscts && !sport->have_rtsgpio) {
1595 uts = imx_uart_readl(sport, imx_uart_uts_reg(sport));
1597 imx_uart_writel(sport, uts, imx_uart_uts_reg(sport));
1602 imx_uart_writel(sport, ucr1, UCR1);
1604 ucr4 = imx_uart_readl(sport, UCR4);
1606 imx_uart_writel(sport, ucr4, UCR4);
1608 spin_unlock_irqrestore(&sport->port.lock, flags);
1610 clk_disable_unprepare(sport->clk_per);
1611 clk_disable_unprepare(sport->clk_ipg);
1617 struct imx_port *sport = (struct imx_port *)port;
1618 struct scatterlist *sgl = &sport->tx_sgl[0];
1620 if (!sport->dma_chan_tx)
1623 sport->tx_bytes = 0;
1624 dmaengine_terminate_all(sport->dma_chan_tx);
1625 if (sport->dma_is_txing) {
1628 dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents,
1630 ucr1 = imx_uart_readl(sport, UCR1);
1632 imx_uart_writel(sport, ucr1, UCR1);
1633 sport->dma_is_txing = 0;
1636 imx_uart_soft_reset(sport);
1644 struct imx_port *sport = (struct imx_port *)port;
1663 del_timer_sync(&sport->timer);
1671 spin_lock_irqsave(&sport->port.lock, flags);
1677 old_ucr2 = imx_uart_readl(sport, UCR2);
1684 if (!sport->have_rtscts)
1694 imx_uart_rts_active(sport, &ucr2);
1696 imx_uart_rts_inactive(sport, &ucr2);
1717 sport->port.read_status_mask = 0;
1719 sport->port.read_status_mask |= (URXD_FRMERR | URXD_PRERR);
1721 sport->port.read_status_mask |= URXD_BRK;
1726 sport->port.ignore_status_mask = 0;
1728 sport->port.ignore_status_mask |= URXD_PRERR | URXD_FRMERR;
1730 sport->port.ignore_status_mask |= URXD_BRK;
1736 sport->port.ignore_status_mask |= URXD_OVRRUN;
1740 sport->port.ignore_status_mask |= URXD_DUMMY_READ;
1748 div = sport->port.uartclk / (baud * 16);
1750 baud = sport->port.uartclk / (quot * 16);
1752 div = sport->port.uartclk / (baud * 16);
1758 rational_best_approximation(16 * div * baud, sport->port.uartclk,
1761 tdiv64 = sport->port.uartclk;
1770 ufcr = imx_uart_readl(sport, UFCR);
1772 imx_uart_writel(sport, ufcr, UFCR);
1783 old_ubir = imx_uart_readl(sport, UBIR);
1784 old_ubmr = imx_uart_readl(sport, UBMR);
1786 imx_uart_writel(sport, num, UBIR);
1787 imx_uart_writel(sport, denom, UBMR);
1790 if (!imx_uart_is_imx1(sport))
1791 imx_uart_writel(sport, sport->port.uartclk / div / 1000,
1794 imx_uart_writel(sport, ucr2, UCR2);
1796 if (UART_ENABLE_MS(&sport->port, termios->c_cflag))
1797 imx_uart_enable_ms(&sport->port);
1799 spin_unlock_irqrestore(&sport->port.lock, flags);
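
set_termios programs the fractional baud divider via rational_best_approximation(): with ref = uartclk / rfdiv, the hardware generates baud = ref * (UBIR + 1) / (16 * (UBMR + 1)), and num - 1 / denom - 1 are what get written to UBIR/UBMR. A sketch checking that relation for one plausible setting; the 80 MHz clock and the register values below are made up for the example.

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint64_t uartclk = 80000000;   /* assumed 80 MHz per-clock */
	unsigned rfdiv   = 1;          /* UFCR_RFDIV divide-by-1 */
	unsigned ubir    = 0x0047;     /* num - 1, as written to UBIR */
	unsigned ubmr    = 0x0c34;     /* denom - 1, as written to UBMR */

	uint64_t ref  = uartclk / rfdiv;
	uint64_t baud = ref * (ubir + 1) / (16ULL * (ubmr + 1));
	printf("baud = %llu\n", (unsigned long long)baud);  /* 115200 here */
	return 0;
}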
1847 struct imx_port *sport = (struct imx_port *)port;
1852 retval = clk_prepare_enable(sport->clk_ipg);
1855 retval = clk_prepare_enable(sport->clk_per);
1857 clk_disable_unprepare(sport->clk_ipg);
1859 imx_uart_setup_ufcr(sport, TXTL_DEFAULT, RXTL_DEFAULT);
1861 spin_lock_irqsave(&sport->port.lock, flags);
1870 ucr1 = imx_uart_readl(sport, UCR1);
1871 ucr2 = imx_uart_readl(sport, UCR2);
1873 if (imx_uart_is_imx1(sport))
1882 imx_uart_writel(sport, ucr1, UCR1);
1883 imx_uart_writel(sport, ucr2, UCR2);
1886 imx_uart_writel(sport, ucr1 | UCR1_RRDYEN, UCR1);
1887 imx_uart_writel(sport, ucr2 | UCR2_ATEN, UCR2);
1889 spin_unlock_irqrestore(&sport->port.lock, flags);
1896 struct imx_port *sport = (struct imx_port *)port;
1897 if (!(imx_uart_readl(sport, USR2) & USR2_RDR))
1900 return imx_uart_readl(sport, URXD0) & URXD_RX_DATA;
1905 struct imx_port *sport = (struct imx_port *)port;
1910 status = imx_uart_readl(sport, USR1);
1914 imx_uart_writel(sport, c, URTX0);
1918 status = imx_uart_readl(sport, USR2);
1927 struct imx_port *sport = (struct imx_port *)port;
1932 if (sport->have_rtscts && !sport->have_rtsgpio &&
1937 ucr2 = imx_uart_readl(sport, UCR2);
1939 imx_uart_rts_active(sport, &ucr2);
1941 imx_uart_rts_inactive(sport, &ucr2);
1942 imx_uart_writel(sport, ucr2, UCR2);
1981 struct imx_port *sport = (struct imx_port *)port;
1983 while (imx_uart_readl(sport, imx_uart_uts_reg(sport)) & UTS_TXFULL)
1986 imx_uart_writel(sport, ch, URTX0);
1995 struct imx_port *sport = imx_uart_ports[co->index];
2001 if (sport->port.sysrq)
2004 locked = spin_trylock_irqsave(&sport->port.lock, flags);
2006 spin_lock_irqsave(&sport->port.lock, flags);
2011 imx_uart_ucrs_save(sport, &old_ucr);
2014 if (imx_uart_is_imx1(sport))
2019 imx_uart_writel(sport, ucr1, UCR1);
2021 imx_uart_writel(sport, old_ucr.ucr2 | UCR2_TXEN, UCR2);
2023 uart_console_write(&sport->port, s, count, imx_uart_console_putchar);
2029 while (!(imx_uart_readl(sport, USR2) & USR2_TXDC));
2031 imx_uart_ucrs_restore(sport, &old_ucr);
2034 spin_unlock_irqrestore(&sport->port.lock, flags);
2042 imx_uart_console_get_options(struct imx_port *sport, int *baud,
2046 if (imx_uart_readl(sport, UCR1) & UCR1_UARTEN) {
2052 ucr2 = imx_uart_readl(sport, UCR2);
2067 ubir = imx_uart_readl(sport, UBIR) & 0xffff;
2068 ubmr = imx_uart_readl(sport, UBMR) & 0xffff;
2070 ucfr_rfdiv = (imx_uart_readl(sport, UFCR) & UFCR_RFDIV) >> 7;
2076 uartclk = clk_get_rate(sport->clk_per);
2095 dev_info(sport->port.dev, "Console IMX rounded baud rate from %d to %d\n",
2103 struct imx_port *sport;
2117 sport = imx_uart_ports[co->index];
2118 if (sport == NULL)
2122 retval = clk_prepare_enable(sport->clk_ipg);
2129 imx_uart_console_get_options(sport, &baud, &parity, &bits);
2131 imx_uart_setup_ufcr(sport, TXTL_DEFAULT, RXTL_DEFAULT);
2133 retval = uart_set_options(&sport->port, co, baud, parity, bits, flow);
2136 clk_disable_unprepare(sport->clk_ipg);
2140 retval = clk_prepare_enable(sport->clk_per);
2142 clk_disable_unprepare(sport->clk_ipg);
2151 struct imx_port *sport = imx_uart_ports[co->index];
2153 clk_disable_unprepare(sport->clk_per);
2154 clk_disable_unprepare(sport->clk_ipg);
2189 struct imx_port *sport = container_of(t, struct imx_port, trigger_start_tx);
2192 spin_lock_irqsave(&sport->port.lock, flags);
2193 if (sport->tx_state == WAIT_AFTER_RTS)
2194 imx_uart_start_tx(&sport->port);
2195 spin_unlock_irqrestore(&sport->port.lock, flags);
2202 struct imx_port *sport = container_of(t, struct imx_port, trigger_stop_tx);
2205 spin_lock_irqsave(&sport->port.lock, flags);
2206 if (sport->tx_state == WAIT_AFTER_SEND)
2207 imx_uart_stop_tx(&sport->port);
2208 spin_unlock_irqrestore(&sport->port.lock, flags);
2227 struct imx_port *sport;
2235 sport = devm_kzalloc(&pdev->dev, sizeof(*sport), GFP_KERNEL);
2236 if (!sport)
2239 sport->devdata = of_device_get_match_data(&pdev->dev);
2246 sport->port.line = ret;
2248 sport->have_rtscts = of_property_read_bool(np, "uart-has-rtscts") ||
2251 sport->dte_mode = of_property_read_bool(np, "fsl,dte-mode");
2253 sport->have_rtsgpio = of_property_present(np, "rts-gpios");
2255 sport->inverted_tx = of_property_read_bool(np, "fsl,inverted-tx");
2257 sport->inverted_rx = of_property_read_bool(np, "fsl,inverted-rx");
2260 sport->rx_period_length = dma_buf_conf[0];
2261 sport->rx_periods = dma_buf_conf[1];
2263 sport->rx_period_length = RX_DMA_PERIOD_LEN;
2264 sport->rx_periods = RX_DMA_PERIODS;
2267 if (sport->port.line >= ARRAY_SIZE(imx_uart_ports)) {
2269 sport->port.line);
2283 sport->port.dev = &pdev->dev;
2284 sport->port.mapbase = res->start;
2285 sport->port.membase = base;
2286 sport->port.type = PORT_IMX;
2287 sport->port.iotype = UPIO_MEM;
2288 sport->port.irq = rxirq;
2289 sport->port.fifosize = 32;
2290 sport->port.has_sysrq = IS_ENABLED(CONFIG_SERIAL_IMX_CONSOLE);
2291 sport->port.ops = &imx_uart_pops;
2292 sport->port.rs485_config = imx_uart_rs485_config;
2294 if (sport->have_rtscts || sport->have_rtsgpio)
2295 sport->port.rs485_supported = imx_rs485_supported;
2296 sport->port.flags = UPF_BOOT_AUTOCONF;
2297 timer_setup(&sport->timer, imx_uart_timeout, 0);
2299 sport->gpios = mctrl_gpio_init(&sport->port, 0);
2300 if (IS_ERR(sport->gpios))
2301 return PTR_ERR(sport->gpios);
2303 sport->clk_ipg = devm_clk_get(&pdev->dev, "ipg");
2304 if (IS_ERR(sport->clk_ipg)) {
2305 ret = PTR_ERR(sport->clk_ipg);
2310 sport->clk_per = devm_clk_get(&pdev->dev, "per");
2311 if (IS_ERR(sport->clk_per)) {
2312 ret = PTR_ERR(sport->clk_per);
2317 sport->port.uartclk = clk_get_rate(sport->clk_per);
2320 ret = clk_prepare_enable(sport->clk_ipg);
2326 ret = uart_get_rs485_mode(&sport->port);
2335 if (sport->port.rs485.flags & SER_RS485_ENABLED &&
2336 sport->have_rtscts && !sport->have_rtsgpio &&
2337 (!(sport->port.rs485.flags & SER_RS485_RTS_ON_SEND) &&
2338 !(sport->port.rs485.flags & SER_RS485_RX_DURING_TX)))
2343 ucr1 = imx_uart_readl(sport, UCR1);
2345 imx_uart_writel(sport, ucr1, UCR1);
2348 ucr2 = imx_uart_readl(sport, UCR2);
2350 imx_uart_writel(sport, ucr2, UCR2);
2362 if (sport->port.rs485.flags & SER_RS485_ENABLED &&
2363 sport->have_rtscts && !sport->have_rtsgpio) {
2364 uts = imx_uart_readl(sport, imx_uart_uts_reg(sport));
2366 imx_uart_writel(sport, uts, imx_uart_uts_reg(sport));
2368 ucr1 = imx_uart_readl(sport, UCR1);
2370 imx_uart_writel(sport, ucr1, UCR1);
2372 ucr2 = imx_uart_readl(sport, UCR2);
2374 imx_uart_writel(sport, ucr2, UCR2);
2377 if (!imx_uart_is_imx1(sport) && sport->dte_mode) {
2384 u32 ufcr = imx_uart_readl(sport, UFCR);
2386 imx_uart_writel(sport, ufcr | UFCR_DCEDTE, UFCR);
2393 imx_uart_writel(sport,
2399 u32 ufcr = imx_uart_readl(sport, UFCR);
2401 imx_uart_writel(sport, ufcr & ~UFCR_DCEDTE, UFCR);
2403 if (!imx_uart_is_imx1(sport))
2405 imx_uart_writel(sport, ucr3, UCR3);
2408 hrtimer_init(&sport->trigger_start_tx, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
2409 hrtimer_init(&sport->trigger_stop_tx, CLOCK_MONOTONIC, HRTIMER_MODE_REL);
2410 sport->trigger_start_tx.function = imx_trigger_start_tx;
2411 sport->trigger_stop_tx.function = imx_trigger_stop_tx;
2419 dev_name(&pdev->dev), sport);
2427 dev_name(&pdev->dev), sport);
2435 dev_name(&pdev->dev), sport);
2443 dev_name(&pdev->dev), sport);
2450 imx_uart_ports[sport->port.line] = sport;
2452 platform_set_drvdata(pdev, sport);
2454 ret = uart_add_one_port(&imx_uart_uart_driver, &sport->port);
2457 clk_disable_unprepare(sport->clk_ipg);
2464 struct imx_port *sport = platform_get_drvdata(pdev);
2466 uart_remove_one_port(&imx_uart_uart_driver, &sport->port);
2471 static void imx_uart_restore_context(struct imx_port *sport)
2475 spin_lock_irqsave(&sport->port.lock, flags);
2476 if (!sport->context_saved) {
2477 spin_unlock_irqrestore(&sport->port.lock, flags);
2481 imx_uart_writel(sport, sport->saved_reg[4], UFCR);
2482 imx_uart_writel(sport, sport->saved_reg[5], UESC);
2483 imx_uart_writel(sport, sport->saved_reg[6], UTIM);
2484 imx_uart_writel(sport, sport->saved_reg[7], UBIR);
2485 imx_uart_writel(sport, sport->saved_reg[8], UBMR);
2486 imx_uart_writel(sport, sport->saved_reg[9], IMX21_UTS);
2487 imx_uart_writel(sport, sport->saved_reg[0], UCR1);
2488 imx_uart_writel(sport, sport->saved_reg[1] | UCR2_SRST, UCR2);
2489 imx_uart_writel(sport, sport->saved_reg[2], UCR3);
2490 imx_uart_writel(sport, sport->saved_reg[3], UCR4);
2491 sport->context_saved = false;
2492 spin_unlock_irqrestore(&sport->port.lock, flags);
2495 static void imx_uart_save_context(struct imx_port *sport)
2500 spin_lock_irqsave(&sport->port.lock, flags);
2501 sport->saved_reg[0] = imx_uart_readl(sport, UCR1);
2502 sport->saved_reg[1] = imx_uart_readl(sport, UCR2);
2503 sport->saved_reg[2] = imx_uart_readl(sport, UCR3);
2504 sport->saved_reg[3] = imx_uart_readl(sport, UCR4);
2505 sport->saved_reg[4] = imx_uart_readl(sport, UFCR);
2506 sport->saved_reg[5] = imx_uart_readl(sport, UESC);
2507 sport->saved_reg[6] = imx_uart_readl(sport, UTIM);
2508 sport->saved_reg[7] = imx_uart_readl(sport, UBIR);
2509 sport->saved_reg[8] = imx_uart_readl(sport, UBMR);
2510 sport->saved_reg[9] = imx_uart_readl(sport, IMX21_UTS);
2511 sport->context_saved = true;
2512 spin_unlock_irqrestore(&sport->port.lock, flags);
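
The suspend path snapshots ten registers plus a context_saved flag; resume replays the configuration block (UFCR through UTS, saved_reg[4..9]) before the control block (UCR1 through UCR4, saved_reg[0..3]), and the flag makes restore a no-op unless a save actually happened. A condensed sketch with an array standing in for the hardware registers.

#include <stdbool.h>
#include <stdio.h>
#include <stdint.h>

struct ctx {
	uint32_t saved_reg[10]; /* UCR1..UCR4, UFCR, UESC, UTIM, UBIR, UBMR, UTS */
	bool context_saved;
};

static void save(struct ctx *c, const uint32_t hw[10])
{
	for (int i = 0; i < 10; i++)
		c->saved_reg[i] = hw[i];
	c->context_saved = true;
}

static void restore(struct ctx *c, uint32_t hw[10])
{
	if (!c->context_saved)
		return;                    /* resume without a prior suspend: skip */
	for (int i = 4; i < 10; i++)       /* configuration block first: UFCR..UTS */
		hw[i] = c->saved_reg[i];
	for (int i = 0; i < 4; i++)        /* control block last: UCR1..UCR4 */
		hw[i] = c->saved_reg[i];
	c->context_saved = false;          /* restore exactly once */
}

int main(void)
{
	uint32_t hw[10] = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
	struct ctx c = { { 0 }, false };
	save(&c, hw);
	hw[0] = 0;                         /* pretend power-down clobbered UCR1 */
	restore(&c, hw);
	printf("UCR1 restored to %u\n", hw[0]);
	return 0;
}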
2515 static void imx_uart_enable_wakeup(struct imx_port *sport, bool on)
2519 ucr3 = imx_uart_readl(sport, UCR3);
2521 imx_uart_writel(sport, USR1_AWAKE, USR1);
2526 imx_uart_writel(sport, ucr3, UCR3);
2528 if (sport->have_rtscts) {
2529 u32 ucr1 = imx_uart_readl(sport, UCR1);
2531 imx_uart_writel(sport, USR1_RTSD, USR1);
2536 imx_uart_writel(sport, ucr1, UCR1);
2542 struct imx_port *sport = dev_get_drvdata(dev);
2544 imx_uart_save_context(sport);
2546 clk_disable(sport->clk_ipg);
2555 struct imx_port *sport = dev_get_drvdata(dev);
2560 ret = clk_enable(sport->clk_ipg);
2564 imx_uart_restore_context(sport);
2571 struct imx_port *sport = dev_get_drvdata(dev);
2574 uart_suspend_port(&imx_uart_uart_driver, &sport->port);
2575 disable_irq(sport->port.irq);
2577 ret = clk_prepare_enable(sport->clk_ipg);
2582 imx_uart_enable_wakeup(sport, true);
2589 struct imx_port *sport = dev_get_drvdata(dev);
2592 imx_uart_enable_wakeup(sport, false);
2594 uart_resume_port(&imx_uart_uart_driver, &sport->port);
2595 enable_irq(sport->port.irq);
2597 clk_disable_unprepare(sport->clk_ipg);
2604 struct imx_port *sport = dev_get_drvdata(dev);
2606 uart_suspend_port(&imx_uart_uart_driver, &sport->port);
2608 return clk_prepare_enable(sport->clk_ipg);
2613 struct imx_port *sport = dev_get_drvdata(dev);
2615 uart_resume_port(&imx_uart_uart_driver, &sport->port);
2617 clk_disable_unprepare(sport->clk_ipg);