Lines matching defs:qspi (stm32-qspi driver)
94 struct stm32_qspi *qspi;
128 struct stm32_qspi *qspi = (struct stm32_qspi *)dev_id;
131 sr = readl_relaxed(qspi->io_base + QSPI_SR);
135 cr = readl_relaxed(qspi->io_base + QSPI_CR);
137 writel_relaxed(cr, qspi->io_base + QSPI_CR);
138 complete(&qspi->data_completion);
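The handler whose body is matched at lines 128-138 follows the usual completion handshake: read SR, mask the transfer-complete/transfer-error interrupt enables in CR, and wake the task sleeping in the command-wait path. A minimal sketch of that pattern follows; like every sketch in this listing it assumes the driver's own includes plus the struct stm32_qspi layout and the QSPI_*/CR_*/SR_* macros defined at the top of the file (which a defs listing does not show), and the handler name and exact flag mask are illustrative.

static irqreturn_t stm32_qspi_irq(int irq, void *dev_id)
{
        struct stm32_qspi *qspi = (struct stm32_qspi *)dev_id;
        u32 cr, sr;

        sr = readl_relaxed(qspi->io_base + QSPI_SR);

        if (sr & (SR_TEF | SR_TCF)) {
                /* mask the sources; the flags themselves are acknowledged
                 * through QSPI_FCR once the waiter has inspected SR */
                cr = readl_relaxed(qspi->io_base + QSPI_CR);
                cr &= ~CR_TCIE & ~CR_TEIE;
                writel_relaxed(cr, qspi->io_base + QSPI_CR);
                complete(&qspi->data_completion);
        }

        return IRQ_HANDLED;
}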
154 static int stm32_qspi_tx_poll(struct stm32_qspi *qspi,
172 ret = readl_relaxed_poll_timeout_atomic(qspi->io_base + QSPI_SR,
176 dev_err(qspi->dev, "fifo timeout (len:%d stat:%#x)\n",
180 tx_fifo(buf++, qspi->io_base + QSPI_DR);
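Lines 154-180 are the CPU-driven fall-back path: wait for the FIFO-threshold flag before every byte, then move one byte at a time through QSPI_DR. A reconstructed sketch; SR_FTF, the timeout constant and the one-byte read/write helpers selected into tx_fifo come from parts of the file this listing omits and are assumptions here.

static int stm32_qspi_tx_poll(struct stm32_qspi *qspi,
                              const struct spi_mem_op *op)
{
        void (*tx_fifo)(u8 *val, void __iomem *addr);
        u32 len = op->data.nbytes, sr;
        u8 *buf;
        int ret;

        if (op->data.dir == SPI_MEM_DATA_IN) {
                tx_fifo = stm32_qspi_read_fifo;   /* readb_relaxed() wrapper */
                buf = op->data.buf.in;
        } else {
                tx_fifo = stm32_qspi_write_fifo;  /* writeb_relaxed() wrapper */
                buf = (u8 *)op->data.buf.out;
        }

        while (len--) {
                /* one byte per FIFO-threshold event */
                ret = readl_relaxed_poll_timeout_atomic(qspi->io_base + QSPI_SR,
                                                        sr, (sr & SR_FTF), 1,
                                                        STM32_FIFO_TIMEOUT_US);
                if (ret) {
                        dev_err(qspi->dev, "fifo timeout (len:%d stat:%#x)\n",
                                len, sr);
                        return ret;
                }
                tx_fifo(buf++, qspi->io_base + QSPI_DR);
        }

        return 0;
}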
186 static int stm32_qspi_tx_mm(struct stm32_qspi *qspi,
189 memcpy_fromio(op->data.buf.in, qspi->mm_base + op->addr.val,
201 static int stm32_qspi_tx_dma(struct stm32_qspi *qspi,
214 dma_ch = qspi->dma_chrx;
217 dma_ch = qspi->dma_chtx;
224 err = spi_controller_dma_map_mem_op_data(qspi->ctrl, op, &sgt);
235 cr = readl_relaxed(qspi->io_base + QSPI_CR);
237 reinit_completion(&qspi->dma_completion);
239 desc->callback_param = &qspi->dma_completion;
247 writel_relaxed(cr | CR_DMAEN, qspi->io_base + QSPI_CR);
250 if (!wait_for_completion_timeout(&qspi->dma_completion,
258 writel_relaxed(cr & ~CR_DMAEN, qspi->io_base + QSPI_CR);
260 spi_controller_dma_unmap_mem_op_data(qspi->ctrl, op, &sgt);
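For indirect transfers that do have a DMA channel (lines 201-260), the spi-mem core maps the payload into a scatterlist, the descriptor is handed to the dmaengine, and the controller's DMA request is gated with CR_DMAEN around the wait on dma_completion. A condensed sketch; the callback, the direction mapping, the timeout constant and the error unwinding are simplified assumptions.

static void stm32_qspi_dma_callback(void *arg)
{
        complete((struct completion *)arg);
}

static int stm32_qspi_tx_dma(struct stm32_qspi *qspi,
                             const struct spi_mem_op *op)
{
        struct dma_async_tx_descriptor *desc;
        enum dma_transfer_direction dma_dir;
        struct dma_chan *dma_ch;
        struct sg_table sgt;
        u32 cr;
        int err;

        if (op->data.dir == SPI_MEM_DATA_IN) {
                dma_dir = DMA_DEV_TO_MEM;
                dma_ch = qspi->dma_chrx;
        } else {
                dma_dir = DMA_MEM_TO_DEV;
                dma_ch = qspi->dma_chtx;
        }

        /* the spi-mem core builds the scatterlist for op->data */
        err = spi_controller_dma_map_mem_op_data(qspi->ctrl, op, &sgt);
        if (err)
                return err;

        desc = dmaengine_prep_slave_sg(dma_ch, sgt.sgl, sgt.nents,
                                       dma_dir, DMA_PREP_INTERRUPT);
        if (!desc) {
                err = -ENOMEM;
                goto out_unmap;
        }

        cr = readl_relaxed(qspi->io_base + QSPI_CR);

        reinit_completion(&qspi->dma_completion);
        desc->callback = stm32_qspi_dma_callback;
        desc->callback_param = &qspi->dma_completion;
        dmaengine_submit(desc);
        dma_async_issue_pending(dma_ch);

        /* let the peripheral drive its DMA request line */
        writel_relaxed(cr | CR_DMAEN, qspi->io_base + QSPI_CR);

        if (!wait_for_completion_timeout(&qspi->dma_completion,
                                         msecs_to_jiffies(STM32_DMA_TIMEOUT_MS))) {
                err = -ETIMEDOUT;
                dmaengine_terminate_all(dma_ch);
        }

        writel_relaxed(cr & ~CR_DMAEN, qspi->io_base + QSPI_CR);
out_unmap:
        spi_controller_dma_unmap_mem_op_data(qspi->ctrl, op, &sgt);

        return err;
}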
265 static int stm32_qspi_tx(struct stm32_qspi *qspi, const struct spi_mem_op *op)
270 if (qspi->fmode == CCR_FMODE_MM)
271 return stm32_qspi_tx_mm(qspi, op);
272 else if ((op->data.dir == SPI_MEM_DATA_IN && qspi->dma_chrx) ||
273 (op->data.dir == SPI_MEM_DATA_OUT && qspi->dma_chtx))
274 if (!stm32_qspi_tx_dma(qspi, op))
277 return stm32_qspi_tx_poll(qspi, op);
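Lines 265-277 give the transmit dispatch order: memory-mapped reads are a plain memcpy_fromio() from mm_base + op->addr.val (lines 186-189), DMA is tried when a matching channel was obtained at probe time, and byte-wise polling is both the last resort and the fall-back when the DMA attempt fails. Reconstructed from the matched lines, with the zero-length early return assumed.

static int stm32_qspi_tx(struct stm32_qspi *qspi, const struct spi_mem_op *op)
{
        if (!op->data.nbytes)
                return 0;

        if (qspi->fmode == CCR_FMODE_MM)
                return stm32_qspi_tx_mm(qspi, op);
        else if ((op->data.dir == SPI_MEM_DATA_IN && qspi->dma_chrx) ||
                 (op->data.dir == SPI_MEM_DATA_OUT && qspi->dma_chtx))
                if (!stm32_qspi_tx_dma(qspi, op))
                        return 0;

        return stm32_qspi_tx_poll(qspi, op);
}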
280 static int stm32_qspi_wait_nobusy(struct stm32_qspi *qspi)
284 return readl_relaxed_poll_timeout_atomic(qspi->io_base + QSPI_SR, sr,
289 static int stm32_qspi_wait_cmd(struct stm32_qspi *qspi,
298 if ((readl_relaxed(qspi->io_base + QSPI_SR) & SR_TCF) ||
299 qspi->fmode == CCR_FMODE_APM)
302 reinit_completion(&qspi->data_completion);
303 cr = readl_relaxed(qspi->io_base + QSPI_CR);
304 writel_relaxed(cr | CR_TCIE | CR_TEIE, qspi->io_base + QSPI_CR);
306 if (!wait_for_completion_timeout(&qspi->data_completion,
310 sr = readl_relaxed(qspi->io_base + QSPI_SR);
317 writel_relaxed(FCR_CTCF | FCR_CTEF, qspi->io_base + QSPI_FCR);
320 err = stm32_qspi_wait_nobusy(qspi);
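Lines 280-320 pair a busy-wait helper with the interrupt-driven wait used after each indirect command: if the transfer-complete flag is already set, or the op ran in automatic-polling mode, the sleep is skipped; otherwise TCIE/TEIE are armed and the task blocks on data_completion until the handler above fires. The flags are then acknowledged through QSPI_FCR and BUSY is polled down. A sketch of that sequence; SR_TEF, SR_BUSY and the timeout macros are assumed names.

static int stm32_qspi_wait_nobusy(struct stm32_qspi *qspi)
{
        u32 sr;

        return readl_relaxed_poll_timeout_atomic(qspi->io_base + QSPI_SR, sr,
                                                 !(sr & SR_BUSY), 1,
                                                 STM32_BUSY_TIMEOUT_US);
}

static int stm32_qspi_wait_cmd(struct stm32_qspi *qspi,
                               const struct spi_mem_op *op)
{
        u32 cr, sr;
        int err = 0;

        if ((readl_relaxed(qspi->io_base + QSPI_SR) & SR_TCF) ||
            qspi->fmode == CCR_FMODE_APM)
                goto out;

        reinit_completion(&qspi->data_completion);
        cr = readl_relaxed(qspi->io_base + QSPI_CR);
        writel_relaxed(cr | CR_TCIE | CR_TEIE, qspi->io_base + QSPI_CR);

        if (!wait_for_completion_timeout(&qspi->data_completion,
                                         msecs_to_jiffies(STM32_COMP_TIMEOUT_MS))) {
                err = -ETIMEDOUT;
        } else {
                sr = readl_relaxed(qspi->io_base + QSPI_SR);
                if (sr & SR_TEF)
                        err = -EIO;
        }

out:
        /* acknowledge the flags, then wait for BUSY to drop */
        writel_relaxed(FCR_CTCF | FCR_CTEF, qspi->io_base + QSPI_FCR);
        if (!err)
                err = stm32_qspi_wait_nobusy(qspi);

        return err;
}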
325 static int stm32_qspi_get_mode(struct stm32_qspi *qspi, u8 buswidth)
335 struct stm32_qspi *qspi = spi_controller_get_devdata(mem->spi->master);
336 struct stm32_qspi_flash *flash = &qspi->flash[mem->spi->chip_select];
340 dev_dbg(qspi->dev, "cmd:%#x mode:%d.%d.%d.%d addr:%#llx len:%#x\n",
345 err = stm32_qspi_wait_nobusy(qspi);
352 if (addr_max < qspi->mm_size &&
354 qspi->fmode = CCR_FMODE_MM;
356 qspi->fmode = CCR_FMODE_INDR;
358 qspi->fmode = CCR_FMODE_INDW;
361 cr = readl_relaxed(qspi->io_base + QSPI_CR);
365 writel_relaxed(cr, qspi->io_base + QSPI_CR);
369 qspi->io_base + QSPI_DLR);
371 qspi->fmode = CCR_FMODE_INDW;
373 ccr = qspi->fmode;
376 stm32_qspi_get_mode(qspi, op->cmd.buswidth));
380 stm32_qspi_get_mode(qspi, op->addr.buswidth));
390 stm32_qspi_get_mode(qspi, op->data.buswidth));
393 writel_relaxed(ccr, qspi->io_base + QSPI_CCR);
395 if (op->addr.nbytes && qspi->fmode != CCR_FMODE_MM)
396 writel_relaxed(op->addr.val, qspi->io_base + QSPI_AR);
398 err = stm32_qspi_tx(qspi, op);
407 if (err || qspi->fmode == CCR_FMODE_MM)
411 err = stm32_qspi_wait_cmd(qspi, op);
418 cr = readl_relaxed(qspi->io_base + QSPI_CR) | CR_ABORT;
419 writel_relaxed(cr, qspi->io_base + QSPI_CR);
422 timeout = readl_relaxed_poll_timeout_atomic(qspi->io_base + QSPI_CR,
426 writel_relaxed(FCR_CTCF, qspi->io_base + QSPI_FCR);
429 dev_err(qspi->dev, "%s err:%d abort timeout:%d\n",
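Lines 335-429 are the core path that turns one spi_mem_op into one QUADSPI command cycle: wait for not-busy, pick the functional mode (memory-mapped read when the access fits inside the mapped window, otherwise indirect read/write), program chip-select and prescaler in CR, the data length in DLR, the instruction/address/dummy/data phases in CCR, the address in AR, then move the payload and wait for completion; an error, or a finished memory-mapped read, is ended with the CR_ABORT sequence. A condensed sketch of that ordering; the CR_*/CCR_* field masks, the FIELD_PREP packing, the flash->cs/presc fields, the addr_max computation and the timeout macro are assumed to match definitions this listing omits.

static int stm32_qspi_send(struct spi_mem *mem, const struct spi_mem_op *op)
{
        struct stm32_qspi *qspi = spi_controller_get_devdata(mem->spi->master);
        struct stm32_qspi_flash *flash = &qspi->flash[mem->spi->chip_select];
        u32 ccr, cr;
        int timeout, err;

        err = stm32_qspi_wait_nobusy(qspi);
        if (err)
                return err;

        /* reads that fit inside the mapped window go memory-mapped */
        if (op->data.dir == SPI_MEM_DATA_IN && op->data.nbytes) {
                u64 addr_max = op->addr.val + op->data.nbytes + 1;

                if (addr_max < qspi->mm_size && op->addr.buswidth)
                        qspi->fmode = CCR_FMODE_MM;
                else
                        qspi->fmode = CCR_FMODE_INDR;
        } else {
                qspi->fmode = CCR_FMODE_INDW;
        }

        /* chip-select and prescaler */
        cr = readl_relaxed(qspi->io_base + QSPI_CR);
        cr &= ~CR_PRESC_MASK & ~CR_FSEL;
        cr |= FIELD_PREP(CR_PRESC_MASK, flash->presc);
        cr |= FIELD_PREP(CR_FSEL, flash->cs);
        writel_relaxed(cr, qspi->io_base + QSPI_CR);

        if (op->data.nbytes)
                writel_relaxed(op->data.nbytes - 1, qspi->io_base + QSPI_DLR);
        else
                qspi->fmode = CCR_FMODE_INDW;

        /* one CCR write describes the whole command cycle */
        ccr = qspi->fmode;
        ccr |= FIELD_PREP(CCR_INSTRUCTION_MASK, op->cmd.opcode);
        ccr |= FIELD_PREP(CCR_IMODE_MASK,
                          stm32_qspi_get_mode(qspi, op->cmd.buswidth));
        if (op->addr.nbytes) {
                ccr |= FIELD_PREP(CCR_ADMODE_MASK,
                                  stm32_qspi_get_mode(qspi, op->addr.buswidth));
                ccr |= FIELD_PREP(CCR_ADSIZE_MASK, op->addr.nbytes - 1);
        }
        if (op->dummy.buswidth && op->dummy.nbytes)
                ccr |= FIELD_PREP(CCR_DCYC_MASK,
                                  op->dummy.nbytes * 8 / op->dummy.buswidth);
        if (op->data.nbytes)
                ccr |= FIELD_PREP(CCR_DMODE_MASK,
                                  stm32_qspi_get_mode(qspi, op->data.buswidth));
        writel_relaxed(ccr, qspi->io_base + QSPI_CCR);

        /* in the indirect modes, writing AR starts the address phase */
        if (op->addr.nbytes && qspi->fmode != CCR_FMODE_MM)
                writel_relaxed(op->addr.val, qspi->io_base + QSPI_AR);

        err = stm32_qspi_tx(qspi, op);
        /* an error, or a finished memory-mapped read, ends with an abort */
        if (err || qspi->fmode == CCR_FMODE_MM)
                goto abort;

        err = stm32_qspi_wait_cmd(qspi, op);
        if (err)
                goto abort;

        return 0;

abort:
        cr = readl_relaxed(qspi->io_base + QSPI_CR) | CR_ABORT;
        writel_relaxed(cr, qspi->io_base + QSPI_CR);

        /* wait for the hardware to clear the abort bit, then ack CTCF */
        timeout = readl_relaxed_poll_timeout_atomic(qspi->io_base + QSPI_CR,
                                                    cr, !(cr & CR_ABORT), 1,
                                                    STM32_ABT_TIMEOUT_US);

        writel_relaxed(FCR_CTCF, qspi->io_base + QSPI_FCR);

        if (err || timeout)
                dev_err(qspi->dev, "%s err:%d abort timeout:%d\n",
                        __func__, err, timeout);

        return err;
}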
437 struct stm32_qspi *qspi = spi_controller_get_devdata(mem->spi->master);
440 ret = pm_runtime_get_sync(qspi->dev);
442 pm_runtime_put_noidle(qspi->dev);
446 mutex_lock(&qspi->lock);
448 mutex_unlock(&qspi->lock);
450 pm_runtime_mark_last_busy(qspi->dev);
451 pm_runtime_put_autosuspend(qspi->dev);
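Lines 437-451 wrap each memory operation in runtime-PM get/put and the controller mutex, so both chip-selects serialize on the single register set and the clock only runs while an op is in flight. A sketch of that wrapper, reusing the stm32_qspi_send() helper sketched above (the real inner helper's name is not part of this listing).

static int stm32_qspi_exec_op(struct spi_mem *mem, const struct spi_mem_op *op)
{
        struct stm32_qspi *qspi = spi_controller_get_devdata(mem->spi->master);
        int ret;

        /* wake the controller (and its clock) before touching registers */
        ret = pm_runtime_get_sync(qspi->dev);
        if (ret < 0) {
                pm_runtime_put_noidle(qspi->dev);
                return ret;
        }

        mutex_lock(&qspi->lock);
        ret = stm32_qspi_send(mem, op);
        mutex_unlock(&qspi->lock);

        pm_runtime_mark_last_busy(qspi->dev);
        pm_runtime_put_autosuspend(qspi->dev);

        return ret;
}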
459 struct stm32_qspi *qspi = spi_controller_get_devdata(ctrl);
470 ret = pm_runtime_get_sync(qspi->dev);
472 pm_runtime_put_noidle(qspi->dev);
476 presc = DIV_ROUND_UP(qspi->clk_rate, spi->max_speed_hz) - 1;
478 flash = &qspi->flash[spi->chip_select];
479 flash->qspi = qspi;
483 mutex_lock(&qspi->lock);
484 qspi->cr_reg = 3 << CR_FTHRES_SHIFT | CR_SSHIFT | CR_EN;
485 writel_relaxed(qspi->cr_reg, qspi->io_base + QSPI_CR);
488 qspi->dcr_reg = DCR_FSIZE_MASK;
489 writel_relaxed(qspi->dcr_reg, qspi->io_base + QSPI_DCR);
490 mutex_unlock(&qspi->lock);
492 pm_runtime_mark_last_busy(qspi->dev);
493 pm_runtime_put_autosuspend(qspi->dev);
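Lines 459-493 compute a per-chip-select prescaler from the kernel clock and the device's max_speed_hz, cache it in the per-flash slot, and enable the block with a four-byte FIFO threshold (FTHRES = 3), sample shift and the largest FSIZE; the CR/DCR values are cached in cr_reg/dcr_reg so resume can restore them. A sketch under those assumptions; the flash->cs/presc fields and the guard checks are not part of this listing.

static int stm32_qspi_setup(struct spi_device *spi)
{
        struct spi_controller *ctrl = spi->master;
        struct stm32_qspi *qspi = spi_controller_get_devdata(ctrl);
        struct stm32_qspi_flash *flash;
        u32 presc;
        int ret;

        if (!spi->max_speed_hz)
                return -EINVAL;

        ret = pm_runtime_get_sync(qspi->dev);
        if (ret < 0) {
                pm_runtime_put_noidle(qspi->dev);
                return ret;
        }

        /* round up so SCK never exceeds the device's max_speed_hz */
        presc = DIV_ROUND_UP(qspi->clk_rate, spi->max_speed_hz) - 1;

        flash = &qspi->flash[spi->chip_select];
        flash->qspi = qspi;
        flash->cs = spi->chip_select;
        flash->presc = presc;

        mutex_lock(&qspi->lock);
        /* 4-byte FIFO threshold, sample shift, enable the block */
        qspi->cr_reg = 3 << CR_FTHRES_SHIFT | CR_SSHIFT | CR_EN;
        writel_relaxed(qspi->cr_reg, qspi->io_base + QSPI_CR);

        /* largest FSIZE: per-op checks bound the mapped accesses instead */
        qspi->dcr_reg = DCR_FSIZE_MASK;
        writel_relaxed(qspi->dcr_reg, qspi->io_base + QSPI_DCR);
        mutex_unlock(&qspi->lock);

        pm_runtime_mark_last_busy(qspi->dev);
        pm_runtime_put_autosuspend(qspi->dev);

        return 0;
}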
498 static int stm32_qspi_dma_setup(struct stm32_qspi *qspi)
501 struct device *dev = qspi->dev;
508 dma_cfg.src_addr = qspi->phys_base + QSPI_DR;
509 dma_cfg.dst_addr = qspi->phys_base + QSPI_DR;
513 qspi->dma_chrx = dma_request_chan(dev, "rx");
514 if (IS_ERR(qspi->dma_chrx)) {
515 ret = PTR_ERR(qspi->dma_chrx);
516 qspi->dma_chrx = NULL;
520 if (dmaengine_slave_config(qspi->dma_chrx, &dma_cfg)) {
522 dma_release_channel(qspi->dma_chrx);
523 qspi->dma_chrx = NULL;
527 qspi->dma_chtx = dma_request_chan(dev, "tx");
528 if (IS_ERR(qspi->dma_chtx)) {
529 ret = PTR_ERR(qspi->dma_chtx);
530 qspi->dma_chtx = NULL;
532 if (dmaengine_slave_config(qspi->dma_chtx, &dma_cfg)) {
534 dma_release_channel(qspi->dma_chtx);
535 qspi->dma_chtx = NULL;
540 init_completion(&qspi->dma_completion);
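Lines 498-540 request the optional "rx" and "tx" DMA channels and point both at QSPI_DR through a dma_slave_config; a missing channel simply leaves the pointer NULL so stm32_qspi_tx() falls back to polling, and only -EPROBE_DEFER is propagated as an error. A sketch with byte-wide accesses and the exact deferral handling assumed.

static int stm32_qspi_dma_setup(struct stm32_qspi *qspi)
{
        struct dma_slave_config dma_cfg;
        struct device *dev = qspi->dev;
        int ret = 0;

        memset(&dma_cfg, 0, sizeof(dma_cfg));
        dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
        dma_cfg.src_addr = qspi->phys_base + QSPI_DR;   /* dev-to-mem reads */
        dma_cfg.dst_addr = qspi->phys_base + QSPI_DR;   /* mem-to-dev writes */

        qspi->dma_chrx = dma_request_chan(dev, "rx");
        if (IS_ERR(qspi->dma_chrx)) {
                ret = PTR_ERR(qspi->dma_chrx);
                qspi->dma_chrx = NULL;
                if (ret == -EPROBE_DEFER)
                        goto out;
        } else if (dmaengine_slave_config(qspi->dma_chrx, &dma_cfg)) {
                dev_err(dev, "dma rx config failed\n");
                dma_release_channel(qspi->dma_chrx);
                qspi->dma_chrx = NULL;
        }

        qspi->dma_chtx = dma_request_chan(dev, "tx");
        if (IS_ERR(qspi->dma_chtx)) {
                ret = PTR_ERR(qspi->dma_chtx);
                qspi->dma_chtx = NULL;
        } else if (dmaengine_slave_config(qspi->dma_chtx, &dma_cfg)) {
                dev_err(dev, "dma tx config failed\n");
                dma_release_channel(qspi->dma_chtx);
                qspi->dma_chtx = NULL;
        }

out:
        init_completion(&qspi->dma_completion);

        /* missing channels are not fatal; only probe deferral is */
        if (ret != -EPROBE_DEFER)
                ret = 0;

        return ret;
}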
548 static void stm32_qspi_dma_free(struct stm32_qspi *qspi)
550 if (qspi->dma_chtx)
551 dma_release_channel(qspi->dma_chtx);
552 if (qspi->dma_chrx)
553 dma_release_channel(qspi->dma_chrx);
569 struct stm32_qspi *qspi;
573 ctrl = spi_alloc_master(dev, sizeof(*qspi));
577 qspi = spi_controller_get_devdata(ctrl);
578 qspi->ctrl = ctrl;
580 res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "qspi");
581 qspi->io_base = devm_ioremap_resource(dev, res);
582 if (IS_ERR(qspi->io_base)) {
583 ret = PTR_ERR(qspi->io_base);
587 qspi->phys_base = res->start;
590 qspi->mm_base = devm_ioremap_resource(dev, res);
591 if (IS_ERR(qspi->mm_base)) {
592 ret = PTR_ERR(qspi->mm_base);
596 qspi->mm_size = resource_size(res);
597 if (qspi->mm_size > STM32_QSPI_MAX_MMAP_SZ) {
609 dev_name(dev), qspi);
615 init_completion(&qspi->data_completion);
617 qspi->clk = devm_clk_get(dev, NULL);
618 if (IS_ERR(qspi->clk)) {
619 ret = PTR_ERR(qspi->clk);
623 qspi->clk_rate = clk_get_rate(qspi->clk);
624 if (!qspi->clk_rate) {
629 ret = clk_prepare_enable(qspi->clk);
646 qspi->dev = dev;
647 platform_set_drvdata(pdev, qspi);
648 ret = stm32_qspi_dma_setup(qspi);
652 mutex_init(&qspi->lock);
678 pm_runtime_get_sync(qspi->dev);
679 /* disable qspi */
680 writel_relaxed(0, qspi->io_base + QSPI_CR);
681 mutex_destroy(&qspi->lock);
682 pm_runtime_put_noidle(qspi->dev);
683 pm_runtime_disable(qspi->dev);
684 pm_runtime_set_suspended(qspi->dev);
685 pm_runtime_dont_use_autosuspend(qspi->dev);
687 stm32_qspi_dma_free(qspi);
689 clk_disable_unprepare(qspi->clk);
691 spi_master_put(qspi->ctrl);
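The probe fragments at lines 569-652, together with the error path at 678-691, show the resource-acquisition order: allocate the controller with the driver state embedded, map the register bank and the memory-mapped flash window, grab the interrupt and the kernel clock, then the optional DMA channels, before the controller itself is registered. A condensed sketch of that ordering; the reset handling, the controller-registration and runtime-PM block, the error labels and the "qspi_mm" resource name are simplifications or assumptions.

static int stm32_qspi_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        struct spi_controller *ctrl;
        struct stm32_qspi *qspi;
        struct resource *res;
        int ret, irq;

        ctrl = spi_alloc_master(dev, sizeof(*qspi));
        if (!ctrl)
                return -ENOMEM;

        qspi = spi_controller_get_devdata(ctrl);
        qspi->ctrl = ctrl;

        /* register bank */
        res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "qspi");
        qspi->io_base = devm_ioremap_resource(dev, res);
        if (IS_ERR(qspi->io_base)) {
                ret = PTR_ERR(qspi->io_base);
                goto err_master_put;
        }
        qspi->phys_base = res->start;   /* kept for the DMA slave config */

        /* memory-mapped flash window (resource name assumed) */
        res = platform_get_resource_byname(pdev, IORESOURCE_MEM, "qspi_mm");
        qspi->mm_base = devm_ioremap_resource(dev, res);
        if (IS_ERR(qspi->mm_base)) {
                ret = PTR_ERR(qspi->mm_base);
                goto err_master_put;
        }
        qspi->mm_size = resource_size(res);
        if (qspi->mm_size > STM32_QSPI_MAX_MMAP_SZ) {
                ret = -EINVAL;
                goto err_master_put;
        }

        irq = platform_get_irq(pdev, 0);
        if (irq < 0) {
                ret = irq;
                goto err_master_put;
        }
        ret = devm_request_irq(dev, irq, stm32_qspi_irq, 0,
                               dev_name(dev), qspi);
        if (ret)
                goto err_master_put;

        init_completion(&qspi->data_completion);

        qspi->clk = devm_clk_get(dev, NULL);
        if (IS_ERR(qspi->clk)) {
                ret = PTR_ERR(qspi->clk);
                goto err_master_put;
        }
        qspi->clk_rate = clk_get_rate(qspi->clk);
        if (!qspi->clk_rate) {
                ret = -EINVAL;
                goto err_master_put;
        }
        ret = clk_prepare_enable(qspi->clk);
        if (ret)
                goto err_master_put;

        qspi->dev = dev;
        platform_set_drvdata(pdev, qspi);
        ret = stm32_qspi_dma_setup(qspi);
        if (ret)
                goto err_clk_disable;

        mutex_init(&qspi->lock);

        /*
         * Controller fields (mem_ops, num_chipselect, mode bits), runtime-PM
         * setup and controller registration follow in the full driver; the
         * error path matched at lines 678-691 unwinds them in reverse.
         */
        return 0;

err_clk_disable:
        clk_disable_unprepare(qspi->clk);
err_master_put:
        spi_master_put(qspi->ctrl);

        return ret;
}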
698 struct stm32_qspi *qspi = platform_get_drvdata(pdev);
700 pm_runtime_get_sync(qspi->dev);
701 /* disable qspi */
702 writel_relaxed(0, qspi->io_base + QSPI_CR);
703 stm32_qspi_dma_free(qspi);
704 mutex_destroy(&qspi->lock);
705 pm_runtime_put_noidle(qspi->dev);
706 pm_runtime_disable(qspi->dev);
707 pm_runtime_set_suspended(qspi->dev);
708 pm_runtime_dont_use_autosuspend(qspi->dev);
709 clk_disable_unprepare(qspi->clk);
716 struct stm32_qspi *qspi = dev_get_drvdata(dev);
718 clk_disable_unprepare(qspi->clk);
725 struct stm32_qspi *qspi = dev_get_drvdata(dev);
727 return clk_prepare_enable(qspi->clk);
739 struct stm32_qspi *qspi = dev_get_drvdata(dev);
754 writel_relaxed(qspi->cr_reg, qspi->io_base + QSPI_CR);
755 writel_relaxed(qspi->dcr_reg, qspi->io_base + QSPI_DCR);
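Lines 739-755 restore the cached CR and DCR values on system resume, because the register file is lost while the power domain is down. A minimal sketch, assuming the clock must be re-enabled first and leaving out the pinctrl/runtime-PM bookkeeping the full callback may also do.

static int __maybe_unused stm32_qspi_resume(struct device *dev)
{
        struct stm32_qspi *qspi = dev_get_drvdata(dev);
        int ret;

        ret = clk_prepare_enable(qspi->clk);
        if (ret)
                return ret;

        /* re-apply the setup cached by stm32_qspi_setup() */
        writel_relaxed(qspi->cr_reg, qspi->io_base + QSPI_CR);
        writel_relaxed(qspi->dcr_reg, qspi->io_base + QSPI_DCR);

        return 0;
}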
770 {.compatible = "st,stm32f469-qspi"},
779 .name = "stm32-qspi",