Lines matching defs:sdd — uses of the struct s3c64xx_spi_driver_data pointer `sdd` throughout the spi-s3c64xx driver
203 static void s3c64xx_flush_fifo(struct s3c64xx_spi_driver_data *sdd)
205 void __iomem *regs = sdd->regs;
224 } while (TX_FIFO_LVL(val, sdd) && loops--);
227 dev_warn(&sdd->pdev->dev, "Timed out flushing TX FIFO\n");
233 if (RX_FIFO_LVL(val, sdd))
240 dev_warn(&sdd->pdev->dev, "Timed out flushing RX FIFO\n");
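The matches at 203-240 come from s3c64xx_flush_fifo(), which drains both FIFOs with a bounded busy-wait and warns on timeout. A minimal sketch of that bounded-poll pattern; the register offset, TX_LVL() decode, and loop budget here are illustrative, not the driver's exact layout:

    #include <linux/io.h>
    #include <linux/jiffies.h>
    #include <linux/device.h>

    /* Hypothetical offset/mask for illustration only. */
    #define SPI_STATUS      0x14
    #define TX_LVL(v)       (((v) >> 6) & 0x7f)

    static void flush_tx_fifo(struct device *dev, void __iomem *regs)
    {
            unsigned long loops = msecs_to_jiffies(1);
            u32 val;

            /* Poll until the TX FIFO empties or the budget runs out. */
            do {
                    val = readl(regs + SPI_STATUS);
            } while (TX_LVL(val) && --loops);

            if (!loops)
                    dev_warn(dev, "Timed out flushing TX FIFO\n");
    }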
253 struct s3c64xx_spi_driver_data *sdd;
258 sdd = container_of(data,
261 sdd = container_of(data,
264 spin_lock_irqsave(&sdd->lock, flags);
267 sdd->state &= ~RXBUSY;
268 if (!(sdd->state & TXBUSY))
269 complete(&sdd->xfer_completion);
271 sdd->state &= ~TXBUSY;
272 if (!(sdd->state & RXBUSY))
273 complete(&sdd->xfer_completion);
276 spin_unlock_irqrestore(&sdd->lock, flags);
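Lines 253-276 are the shared DMA completion callback: container_of() recovers the driver data from whichever embedded channel descriptor fired, and the transfer completion is signalled only once both RXBUSY and TXBUSY have cleared, under the driver's spinlock. A sketch of the pattern; the struct names and the explicit is_rx parameter are simplifications (the real callback infers direction from the embedded member):

    #include <linux/container_of.h>
    #include <linux/spinlock.h>
    #include <linux/completion.h>
    #include <linux/bits.h>

    #define RXBUSY BIT(0)
    #define TXBUSY BIT(1)

    struct chan_data { int direction; };

    struct drv_data {
            struct chan_data rx_dma, tx_dma;
            spinlock_t lock;
            unsigned int state;             /* RXBUSY | TXBUSY */
            struct completion xfer_completion;
    };

    static void dma_callback(void *data, bool is_rx)
    {
            struct chan_data *ch = data;
            struct drv_data *dd;
            unsigned long flags;

            /* Recover the parent structure from the embedded member. */
            dd = is_rx ? container_of(ch, struct drv_data, rx_dma)
                       : container_of(ch, struct drv_data, tx_dma);

            spin_lock_irqsave(&dd->lock, flags);
            dd->state &= is_rx ? ~RXBUSY : ~TXBUSY;
            /* Complete only when the opposite direction is also done. */
            if (!(dd->state & (RXBUSY | TXBUSY)))
                    complete(&dd->xfer_completion);
            spin_unlock_irqrestore(&dd->lock, flags);
    }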
282 struct s3c64xx_spi_driver_data *sdd;
290 sdd = container_of((void *)dma,
293 config.src_addr = sdd->sfr_start + S3C64XX_SPI_RX_DATA;
294 config.src_addr_width = sdd->cur_bpw / 8;
298 sdd = container_of((void *)dma,
301 config.dst_addr = sdd->sfr_start + S3C64XX_SPI_TX_DATA;
302 config.dst_addr_width = sdd->cur_bpw / 8;
310 dev_err(&sdd->pdev->dev, "unable to prepare %s scatterlist",
321 dev_err(&sdd->pdev->dev, "DMA submission failed");
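282-321 are from prepare_dma(): one helper serves both directions, filling a struct dma_slave_config whose FIFO address and register width follow the current bits-per-word, then preparing and submitting the scatterlist. A sketch of that dmaengine setup, assuming the FIFO bus address and word width are passed in:

    #include <linux/dmaengine.h>
    #include <linux/scatterlist.h>
    #include <linux/errno.h>

    static int config_and_submit(struct dma_chan *ch, struct scatterlist *sgl,
                                 unsigned int nents, dma_addr_t fifo_addr,
                                 unsigned int bpw, bool is_rx)
    {
            struct dma_slave_config cfg = { };
            struct dma_async_tx_descriptor *desc;
            dma_cookie_t cookie;

            if (is_rx) {
                    cfg.direction = DMA_DEV_TO_MEM;
                    cfg.src_addr = fifo_addr;
                    cfg.src_addr_width = bpw / 8;   /* 1, 2 or 4 bytes */
            } else {
                    cfg.direction = DMA_MEM_TO_DEV;
                    cfg.dst_addr = fifo_addr;
                    cfg.dst_addr_width = bpw / 8;
            }
            dmaengine_slave_config(ch, &cfg);

            desc = dmaengine_prep_slave_sg(ch, sgl, nents, cfg.direction,
                                           DMA_PREP_INTERRUPT);
            if (!desc)
                    return -ENOMEM;         /* "unable to prepare ... scatterlist" */

            cookie = dmaengine_submit(desc);
            if (dma_submit_error(cookie))
                    return -EIO;            /* "DMA submission failed" */

            dma_async_issue_pending(ch);
            return 0;
    }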
331 struct s3c64xx_spi_driver_data *sdd =
334 if (sdd->cntrlr_info->no_cs)
338 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) {
339 writel(0, sdd->regs + S3C64XX_SPI_CS_REG);
341 u32 ssel = readl(sdd->regs + S3C64XX_SPI_CS_REG);
345 writel(ssel, sdd->regs + S3C64XX_SPI_CS_REG);
348 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO))
350 sdd->regs + S3C64XX_SPI_CS_REG);
356 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(spi);
358 if (is_polling(sdd))
362 sdd->rx_dma.ch = dma_request_chan(&sdd->pdev->dev, "rx");
363 if (IS_ERR(sdd->rx_dma.ch)) {
364 dev_err(&sdd->pdev->dev, "Failed to get RX DMA channel\n");
365 sdd->rx_dma.ch = NULL;
369 sdd->tx_dma.ch = dma_request_chan(&sdd->pdev->dev, "tx");
370 if (IS_ERR(sdd->tx_dma.ch)) {
371 dev_err(&sdd->pdev->dev, "Failed to get TX DMA channel\n");
372 dma_release_channel(sdd->rx_dma.ch);
373 sdd->tx_dma.ch = NULL;
374 sdd->rx_dma.ch = NULL;
378 spi->dma_rx = sdd->rx_dma.ch;
379 spi->dma_tx = sdd->tx_dma.ch;
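356-379 show prepare_transfer(): both channels are requested by name, and a failure on "tx" releases the already-acquired "rx" channel before nulling both pointers. A sketch of that acquire-or-unwind sequence; the -EBUSY return is an assumption here, not necessarily the driver's exact error code:

    #include <linux/dmaengine.h>
    #include <linux/device.h>
    #include <linux/err.h>
    #include <linux/errno.h>

    static int acquire_dma(struct device *dev, struct dma_chan **rx,
                           struct dma_chan **tx)
    {
            *rx = dma_request_chan(dev, "rx");
            if (IS_ERR(*rx)) {
                    dev_err(dev, "Failed to get RX DMA channel\n");
                    *rx = NULL;
                    return -EBUSY;
            }

            *tx = dma_request_chan(dev, "tx");
            if (IS_ERR(*tx)) {
                    dev_err(dev, "Failed to get TX DMA channel\n");
                    /* Unwind: drop the RX channel we already hold. */
                    dma_release_channel(*rx);
                    *rx = NULL;
                    *tx = NULL;
                    return -EBUSY;
            }
            return 0;
    }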
386 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(spi);
388 if (is_polling(sdd))
392 if (sdd->rx_dma.ch && sdd->tx_dma.ch) {
393 dma_release_channel(sdd->rx_dma.ch);
394 dma_release_channel(sdd->tx_dma.ch);
395 sdd->rx_dma.ch = NULL;
396 sdd->tx_dma.ch = NULL;
406 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(host);
408 if (sdd->rx_dma.ch && sdd->tx_dma.ch) {
409 return xfer->len > (FIFO_LVL_MASK(sdd) >> 1) + 1;
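406-409 are the can_dma() hook: DMA is chosen only when the transfer exceeds one FIFO's usable depth, (FIFO_LVL_MASK >> 1) + 1 bytes; anything shorter goes through PIO. A one-function sketch of that threshold:

    #include <linux/types.h>

    /* DMA pays off only past one FIFO depth; below that, PIO is cheaper. */
    static bool use_dma_for(unsigned int len, unsigned int fifo_lvl_mask)
    {
            unsigned int fifo_len = (fifo_lvl_mask >> 1) + 1;

            return len > fifo_len;
    }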
416 static int s3c64xx_enable_datapath(struct s3c64xx_spi_driver_data *sdd,
419 void __iomem *regs = sdd->regs;
437 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff)
443 sdd->state |= TXBUSY;
447 ret = prepare_dma(&sdd->tx_dma, &xfer->tx_sg);
449 switch (sdd->cur_bpw) {
467 sdd->state |= RXBUSY;
469 if (sdd->port_conf->high_speed && sdd->cur_speed >= 30000000UL
470 && !(sdd->cur_mode & SPI_CPHA))
476 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff)
479 ret = prepare_dma(&sdd->rx_dma, &xfer->rx_sg);
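416-479 are s3c64xx_enable_datapath(): for each active direction the hardware packet counter is loaded with the transfer length expressed in words, (len * 8 / cur_bpw) & 0xffff, before the DMA descriptor is armed. A sketch of the word-count derivation:

    #include <linux/types.h>

    /* Convert a byte length into the 16-bit word count the counter takes. */
    static u32 packet_count(unsigned int len_bytes, unsigned int bpw)
    {
            return (len_bytes * 8 / bpw) & 0xffff;
    }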
492 static u32 s3c64xx_spi_wait_for_timeout(struct s3c64xx_spi_driver_data *sdd,
495 void __iomem *regs = sdd->regs;
500 u32 max_fifo = (FIFO_LVL_MASK(sdd) >> 1) + 1;
507 } while (RX_FIFO_LVL(status, sdd) < max_fifo && --val);
510 return RX_FIFO_LVL(status, sdd);
513 static int s3c64xx_wait_for_dma(struct s3c64xx_spi_driver_data *sdd,
516 void __iomem *regs = sdd->regs;
522 ms = xfer->len * 8 * 1000 / sdd->cur_speed;
527 val = wait_for_completion_timeout(&sdd->xfer_completion, val);
541 while ((TX_FIFO_LVL(status, sdd)
542 || !S3C64XX_SPI_ST_TX_DONE(status, sdd))
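513-542 are s3c64xx_wait_for_dma(): the timeout is derived from the transfer itself — len bytes at 8 bits each over cur_speed Hz gives the time in ms — then padded before wait_for_completion_timeout(), after which the TX FIFO is drained. A sketch of the timeout derivation; the 30 ms slack mirrors the driver's tolerance but the exact margin is an assumption:

    #include <linux/completion.h>
    #include <linux/jiffies.h>
    #include <linux/errno.h>

    static int wait_dma_done(struct completion *done, unsigned int len,
                             unsigned long speed_hz)
    {
            unsigned long ms;

            /* Wire time in ms: len bytes * 8 bits / speed, plus headroom. */
            ms = len * 8 * 1000 / speed_hz;
            ms += 30;       /* tolerance; the driver adds its own margin */

            if (!wait_for_completion_timeout(done, msecs_to_jiffies(ms)))
                    return -ETIMEDOUT;
            return 0;
    }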
557 static int s3c64xx_wait_for_pio(struct s3c64xx_spi_driver_data *sdd,
560 void __iomem *regs = sdd->regs;
570 time_us = (xfer->len * 8 * 1000 * 1000) / sdd->cur_speed;
576 if (RX_FIFO_LVL(status, sdd) < xfer->len)
581 if (!wait_for_completion_timeout(&sdd->xfer_completion, val))
588 } while (RX_FIFO_LVL(status, sdd) < xfer->len && --val);
595 sdd->state &= ~TXBUSY;
607 loops = xfer->len / ((FIFO_LVL_MASK(sdd) >> 1) + 1);
611 cpy_len = s3c64xx_spi_wait_for_timeout(sdd,
614 switch (sdd->cur_bpw) {
631 sdd->state &= ~RXBUSY;
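557-631 are s3c64xx_wait_for_pio(): the expected time is computed in µs from len and cur_speed, and reception is drained in FIFO-sized chunks, each chunk sized by whatever s3c64xx_spi_wait_for_timeout() reports as available. A sketch of the chunked drain for the 8-bit case; rx_fifo_level() is a hypothetical stand-in for the RX_FIFO_LVL() status decode, and the timeout path is reduced to a bail-out:

    #include <linux/io.h>
    #include <linux/minmax.h>
    #include <linux/errno.h>

    /* Hypothetical stand-in for RX_FIFO_LVL(status, sdd). */
    extern unsigned int rx_fifo_level(void __iomem *regs);

    static int drain_rx_in_chunks(void __iomem *regs, void __iomem *rx_fifo,
                                  u8 *buf, unsigned int len,
                                  unsigned int fifo_len)
    {
            while (len) {
                    unsigned int avail = rx_fifo_level(regs);
                    /* Take at most one FIFO's worth per pass. */
                    unsigned int cpy_len = min3(avail, len, fifo_len);

                    if (!cpy_len)
                            return -EIO;    /* timeout handling omitted */

                    len -= cpy_len;
                    while (cpy_len--)
                            *buf++ = readb(rx_fifo);
            }
            return 0;
    }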
636 static int s3c64xx_spi_config(struct s3c64xx_spi_driver_data *sdd)
638 void __iomem *regs = sdd->regs;
641 int div = sdd->port_conf->clk_div;
644 if (!sdd->port_conf->clk_from_cmu) {
656 if (sdd->cur_mode & SPI_CPOL)
659 if (sdd->cur_mode & SPI_CPHA)
669 switch (sdd->cur_bpw) {
684 if ((sdd->cur_mode & SPI_LOOP) && sdd->port_conf->has_loopback)
691 if (sdd->port_conf->clk_from_cmu) {
692 ret = clk_set_rate(sdd->src_clk, sdd->cur_speed * div);
695 sdd->cur_speed = clk_get_rate(sdd->src_clk) / div;
700 val |= ((clk_get_rate(sdd->src_clk) / sdd->cur_speed / div - 1)
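636-700 are s3c64xx_spi_config(): mode bits (CPOL/CPHA), channel width, and the clock. On CMU-driven ports the rate is set via clk_set_rate() and read back; otherwise a prescaler is derived so that src_clk / div / (PSR + 1) lands on the requested speed. A sketch of that divider math, ignoring the driver's PSR range checks:

    #include <linux/clk.h>
    #include <linux/types.h>

    /* Either program the clock directly or derive a prescaler for it. */
    static int set_spi_speed(struct clk *src, bool clk_from_cmu,
                             unsigned long *speed, int div, u32 *psr)
    {
            if (clk_from_cmu) {
                    int ret = clk_set_rate(src, *speed * div);

                    if (ret)
                            return ret;
                    /* Read back what the clock tree actually gave us. */
                    *speed = clk_get_rate(src) / div;
                    return 0;
            }

            /* PSR such that src / div / (PSR + 1) <= requested speed. */
            *psr = clk_get_rate(src) / *speed / div - 1;
            *speed = clk_get_rate(src) / div / (*psr + 1);
            return 0;
    }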
718 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(host);
725 writel(0, sdd->regs + S3C64XX_SPI_FB_CLK);
727 writel(cs->fb_delay & 0x3, sdd->regs + S3C64XX_SPI_FB_CLK);
743 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(host);
744 const unsigned int fifo_len = (FIFO_LVL_MASK(sdd) >> 1) + 1;
757 reinit_completion(&sdd->xfer_completion);
763 if (bpw != sdd->cur_bpw || speed != sdd->cur_speed) {
764 sdd->cur_bpw = bpw;
765 sdd->cur_speed = speed;
766 sdd->cur_mode = spi->mode;
767 status = s3c64xx_spi_config(sdd);
772 if (!is_polling(sdd) && (xfer->len > fifo_len) &&
773 sdd->rx_dma.ch && sdd->tx_dma.ch) {
790 reinit_completion(&sdd->xfer_completion);
804 val = readl(sdd->regs + S3C64XX_SPI_MODE_CFG);
807 writel(val, sdd->regs + S3C64XX_SPI_MODE_CFG);
810 val = readl(sdd->regs + S3C64XX_SPI_INT_EN);
812 sdd->regs + S3C64XX_SPI_INT_EN);
816 spin_lock_irqsave(&sdd->lock, flags);
819 sdd->state &= ~RXBUSY;
820 sdd->state &= ~TXBUSY;
825 status = s3c64xx_enable_datapath(sdd, xfer, use_dma);
827 spin_unlock_irqrestore(&sdd->lock, flags);
835 status = s3c64xx_wait_for_dma(sdd, xfer);
837 status = s3c64xx_wait_for_pio(sdd, xfer, use_irq);
843 (sdd->state & RXBUSY) ? 'f' : 'p',
844 (sdd->state & TXBUSY) ? 'f' : 'p',
850 if (xfer->tx_buf && (sdd->state & TXBUSY)) {
851 dmaengine_pause(sdd->tx_dma.ch);
852 dmaengine_tx_status(sdd->tx_dma.ch, sdd->tx_dma.cookie, &s);
853 dmaengine_terminate_all(sdd->tx_dma.ch);
857 if (xfer->rx_buf && (sdd->state & RXBUSY)) {
858 dmaengine_pause(sdd->rx_dma.ch);
859 dmaengine_tx_status(sdd->rx_dma.ch, sdd->rx_dma.cookie, &s);
860 dmaengine_terminate_all(sdd->rx_dma.ch);
865 s3c64xx_flush_fifo(sdd);
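743-865 span s3c64xx_spi_transfer_one(): the completion is re-armed, the controller reconfigured only when bpw or speed changed, DMA chosen past the FIFO threshold, and on timeout each still-busy direction is paused, its residue queried, and then terminated before the FIFOs are flushed. A sketch of that per-direction teardown; the residue log line is illustrative:

    #include <linux/dmaengine.h>
    #include <linux/device.h>

    static void abort_dma_dir(struct dma_chan *ch, dma_cookie_t cookie,
                              struct device *dev, const char *dir)
    {
            struct dma_tx_state s;

            dmaengine_pause(ch);
            /* Query how much was left so the failure can be reported. */
            dmaengine_tx_status(ch, cookie, &s);
            dev_err(dev, "%s DMA aborted, residue %u\n", dir, s.residue);
            dmaengine_terminate_all(ch);
    }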
931 struct s3c64xx_spi_driver_data *sdd;
935 sdd = spi_controller_get_devdata(spi->controller);
950 pm_runtime_get_sync(&sdd->pdev->dev);
952 div = sdd->port_conf->clk_div;
955 if (!sdd->port_conf->clk_from_cmu) {
959 speed = clk_get_rate(sdd->src_clk) / div / (0 + 1);
964 psr = clk_get_rate(sdd->src_clk) / div / spi->max_speed_hz - 1;
969 speed = clk_get_rate(sdd->src_clk) / div / (psr + 1);
979 speed = clk_get_rate(sdd->src_clk) / div / (psr + 1);
990 pm_runtime_mark_last_busy(&sdd->pdev->dev);
991 pm_runtime_put_autosuspend(&sdd->pdev->dev);
997 pm_runtime_mark_last_busy(&sdd->pdev->dev);
998 pm_runtime_put_autosuspend(&sdd->pdev->dev);
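931-998 are from s3c64xx_spi_setup(): the device is powered up with pm_runtime_get_sync() while the prescaler is validated against max_speed_hz, and both the success and error exits mark-last-busy and drop the reference with autosuspend. A sketch of that balanced runtime-PM usage; the put_noidle on a failed get is common practice rather than something shown in these matches:

    #include <linux/pm_runtime.h>

    static int with_runtime_pm(struct device *dev,
                               int (*body)(struct device *))
    {
            int ret;

            ret = pm_runtime_get_sync(dev);
            if (ret < 0) {
                    pm_runtime_put_noidle(dev);
                    return ret;
            }

            ret = body(dev);        /* e.g. the prescaler validation */

            /* Success and error paths drop the reference the same way. */
            pm_runtime_mark_last_busy(dev);
            pm_runtime_put_autosuspend(dev);
            return ret;
    }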
1024 struct s3c64xx_spi_driver_data *sdd = data;
1025 struct spi_controller *spi = sdd->host;
1028 val = readl(sdd->regs + S3C64XX_SPI_STATUS);
1048 complete(&sdd->xfer_completion);
1050 val = readl(sdd->regs + S3C64XX_SPI_INT_EN);
1052 sdd->regs + S3C64XX_SPI_INT_EN);
1056 writel(clr, sdd->regs + S3C64XX_SPI_PENDING_CLR);
1057 writel(0, sdd->regs + S3C64XX_SPI_PENDING_CLR);
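1024-1057 are the interrupt handler: STATUS is decoded, the waiter completed, the trailing-byte interrupt masked in INT_EN, and the acknowledged bits pulsed through PENDING_CLR — write the bits, then write zero. A sketch of that ack-and-mask sequence with illustrative offsets:

    #include <linux/io.h>

    #define SPI_INT_EN      0x0c    /* illustrative offsets, not the real map */
    #define SPI_PENDING_CLR 0x18

    static void ack_and_mask(void __iomem *regs, u32 clr, u32 mask_off)
    {
            u32 val;

            /* Mask the interrupt source we just handled... */
            val = readl(regs + SPI_INT_EN);
            writel(val & ~mask_off, regs + SPI_INT_EN);

            /* ...then pulse pending-clear: set the bits, then write zero. */
            writel(clr, regs + SPI_PENDING_CLR);
            writel(0, regs + SPI_PENDING_CLR);
    }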
1062 static void s3c64xx_spi_hwinit(struct s3c64xx_spi_driver_data *sdd)
1064 struct s3c64xx_spi_info *sci = sdd->cntrlr_info;
1065 void __iomem *regs = sdd->regs;
1068 sdd->cur_speed = 0;
1071 writel(0, sdd->regs + S3C64XX_SPI_CS_REG);
1072 else if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO))
1073 writel(S3C64XX_SPI_CS_SIG_INACT, sdd->regs + S3C64XX_SPI_CS_REG);
1078 if (!sdd->port_conf->clk_from_cmu)
1100 s3c64xx_flush_fifo(sdd);
1152 struct s3c64xx_spi_driver_data *sdd;
1172 host = devm_spi_alloc_host(&pdev->dev, sizeof(*sdd));
1179 sdd = spi_controller_get_devdata(host);
1180 sdd->port_conf = s3c64xx_spi_get_port_config(pdev);
1181 sdd->host = host;
1182 sdd->cntrlr_info = sci;
1183 sdd->pdev = pdev;
1189 sdd->port_id = ret;
1191 sdd->port_id = pdev->id;
1194 sdd->cur_bpw = 8;
1196 sdd->tx_dma.direction = DMA_MEM_TO_DEV;
1197 sdd->rx_dma.direction = DMA_DEV_TO_MEM;
1200 host->bus_num = sdd->port_id;
1215 if (sdd->port_conf->has_loopback)
1218 if (!is_polling(sdd))
1221 sdd->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &mem_res);
1222 if (IS_ERR(sdd->regs))
1223 return PTR_ERR(sdd->regs);
1224 sdd->sfr_start = mem_res->start;
1231 sdd->clk = devm_clk_get_enabled(&pdev->dev, "spi");
1232 if (IS_ERR(sdd->clk))
1233 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->clk),
1237 sdd->src_clk = devm_clk_get_enabled(&pdev->dev, clk_name);
1238 if (IS_ERR(sdd->src_clk))
1239 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->src_clk),
1243 if (sdd->port_conf->clk_ioclk) {
1244 sdd->ioclk = devm_clk_get_enabled(&pdev->dev, "spi_ioclk");
1245 if (IS_ERR(sdd->ioclk))
1246 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->ioclk),
1257 s3c64xx_spi_hwinit(sdd);
1259 spin_lock_init(&sdd->lock);
1260 init_completion(&sdd->xfer_completion);
1263 "spi-s3c64xx", sdd);
1272 sdd->regs + S3C64XX_SPI_INT_EN);
1281 sdd->port_id, host->num_chipselect);
1283 mem_res, (FIFO_LVL_MASK(sdd) >> 1) + 1);
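1152-1283 are the probe path: devm_spi_alloc_host() carries the driver data, the register window comes from devm_platform_get_and_ioremap_resource(), and each clock is taken with devm_clk_get_enabled() so dev_err_probe() can fail out without manual unwinding. A sketch of that devm-based acquisition; the struct and the error message text are illustrative:

    #include <linux/clk.h>
    #include <linux/err.h>
    #include <linux/errno.h>
    #include <linux/platform_device.h>
    #include <linux/spi/spi.h>

    struct my_drvdata {             /* stand-in for the private struct */
            void __iomem *regs;
            struct clk *clk;
    };

    static int my_probe(struct platform_device *pdev)
    {
            struct spi_controller *host;
            struct my_drvdata *dd;
            struct resource *res;

            host = devm_spi_alloc_host(&pdev->dev, sizeof(*dd));
            if (!host)
                    return -ENOMEM;
            dd = spi_controller_get_devdata(host);

            dd->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
            if (IS_ERR(dd->regs))
                    return PTR_ERR(dd->regs);

            /* devm_clk_get_enabled() pairs get + prepare_enable and
             * auto-unwinds on detach, so no error labels are needed. */
            dd->clk = devm_clk_get_enabled(&pdev->dev, "spi");
            if (IS_ERR(dd->clk))
                    return dev_err_probe(&pdev->dev, PTR_ERR(dd->clk),
                                         "failed to get spi clock\n");
            return 0;
    }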
1301 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(host);
1305 writel(0, sdd->regs + S3C64XX_SPI_INT_EN);
1307 if (!is_polling(sdd)) {
1308 dma_release_channel(sdd->rx_dma.ch);
1309 dma_release_channel(sdd->tx_dma.ch);
1321 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(host);
1331 sdd->cur_speed = 0; /* Output Clock is stopped */
1339 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(host);
1340 struct s3c64xx_spi_info *sci = sdd->cntrlr_info;
1358 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(host);
1360 clk_disable_unprepare(sdd->clk);
1361 clk_disable_unprepare(sdd->src_clk);
1362 clk_disable_unprepare(sdd->ioclk);
1370 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(host);
1373 if (sdd->port_conf->clk_ioclk) {
1374 ret = clk_prepare_enable(sdd->ioclk);
1379 ret = clk_prepare_enable(sdd->src_clk);
1383 ret = clk_prepare_enable(sdd->clk);
1387 s3c64xx_spi_hwinit(sdd);
1391 sdd->regs + S3C64XX_SPI_INT_EN);
1396 clk_disable_unprepare(sdd->src_clk);
1398 clk_disable_unprepare(sdd->ioclk);
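1370-1398 close the file with runtime resume: clocks come up in order ioclk -> src_clk -> clk, the hardware is re-initialised, and the error labels unwind in reverse. A sketch of that ordered bring-up with rollback:

    #include <linux/clk.h>

    static int bringup_clocks(struct clk *ioclk, struct clk *src_clk,
                              struct clk *clk)
    {
            int ret;

            ret = clk_prepare_enable(ioclk);
            if (ret)
                    return ret;
            ret = clk_prepare_enable(src_clk);
            if (ret)
                    goto err_ioclk;
            ret = clk_prepare_enable(clk);
            if (ret)
                    goto err_src;
            return 0;

    /* Unwind in the reverse of the enable order. */
    err_src:
            clk_disable_unprepare(src_clk);
    err_ioclk:
            clk_disable_unprepare(ioclk);
            return ret;
    }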