Lines matching defs:mdata (struct mtk_spi driver data; drivers/spi/spi-mt65xx.c)

269 static void mtk_spi_reset(struct mtk_spi *mdata)
274 reg_val = readl(mdata->base + SPI_CMD_REG);
276 writel(reg_val, mdata->base + SPI_CMD_REG);
278 reg_val = readl(mdata->base + SPI_CMD_REG);
280 writel(reg_val, mdata->base + SPI_CMD_REG);
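
The two read/write pairs above (274/276 and 278/280) bracket a reset pulse; the bit manipulation between them is not part of the matches. A minimal sketch of the likely sequence, assuming a SPI_CMD_RST bit (the name is inferred, not shown here):

    static void mtk_spi_reset(struct mtk_spi *mdata)
    {
        u32 reg_val;

        /* assert the software-reset bit... */
        reg_val = readl(mdata->base + SPI_CMD_REG);
        reg_val |= SPI_CMD_RST;
        writel(reg_val, mdata->base + SPI_CMD_REG);

        /* ...then clear it again to release the controller */
        reg_val = readl(mdata->base + SPI_CMD_REG);
        reg_val &= ~SPI_CMD_RST;
        writel(reg_val, mdata->base + SPI_CMD_REG);
    }
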
285 struct mtk_spi *mdata = spi_master_get_devdata(spi->master);
296 setup = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000;
301 hold = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000;
306 inactive = (delay * DIV_ROUND_UP(mdata->spi_clk_hz, 1000000)) / 1000;
309 reg_val = readl(mdata->base + SPI_CFG0_REG);
310 if (mdata->dev_comp->enhance_timing) {
336 writel(reg_val, mdata->base + SPI_CFG0_REG);
341 reg_val = readl(mdata->base + SPI_CFG1_REG);
344 writel(reg_val, mdata->base + SPI_CFG1_REG);
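
Lines 296, 301 and 306 repeat one conversion: a delay in nanoseconds times the clock rate in MHz (rounded up), divided by 1000, gives the delay in SPI clock ticks. Restated as a standalone helper (the function name is mine, not the driver's):

    /* DIV_ROUND_UP comes from <linux/math.h> */
    static u32 mtk_spi_ns_to_ticks(u32 delay_ns, u32 spi_clk_hz)
    {
        /* ticks = ns * ceil(clk / 1 MHz) / 1000 */
        return (delay_ns * DIV_ROUND_UP(spi_clk_hz, 1000000)) / 1000;
    }
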
356 struct mtk_spi *mdata = spi_master_get_devdata(master);
361 reg_val = readl(mdata->base + SPI_CMD_REG);
362 if (mdata->dev_comp->ipm_design) {
398 if (mdata->dev_comp->enhance_timing) {
420 writel(reg_val, mdata->base + SPI_CMD_REG);
423 if (mdata->dev_comp->need_pad_sel)
424 writel(mdata->pad_sel[spi_get_chipselect(spi, 0)],
425 mdata->base + SPI_PAD_SEL_REG);
428 if (mdata->dev_comp->enhance_timing) {
429 if (mdata->dev_comp->ipm_design) {
430 reg_val = readl(mdata->base + SPI_CMD_REG);
434 writel(reg_val, mdata->base + SPI_CMD_REG);
436 reg_val = readl(mdata->base + SPI_CFG1_REG);
440 writel(reg_val, mdata->base + SPI_CFG1_REG);
443 reg_val = readl(mdata->base + SPI_CFG1_REG);
447 writel(reg_val, mdata->base + SPI_CFG1_REG);
464 struct mtk_spi *mdata = spi_master_get_devdata(spi->master);
469 reg_val = readl(mdata->base + SPI_CMD_REG);
472 writel(reg_val, mdata->base + SPI_CMD_REG);
475 writel(reg_val, mdata->base + SPI_CMD_REG);
476 mdata->state = MTK_SPI_IDLE;
477 mtk_spi_reset(mdata);
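
Lines 464-477 are the chip-select hook: another SPI_CMD_REG read-modify-write, plus a drop back to MTK_SPI_IDLE and a controller reset when CS is released. A sketch of how the listed pieces plausibly connect, with SPI_CMD_PAUSE_EN as an assumed bit name:

    static void mtk_spi_set_cs(struct spi_device *spi, bool enable)
    {
        u32 reg_val;
        struct mtk_spi *mdata = spi_master_get_devdata(spi->master);

        reg_val = readl(mdata->base + SPI_CMD_REG);
        if (!enable) {
            /* keep the controller paused while CS stays active */
            reg_val |= SPI_CMD_PAUSE_EN;
            writel(reg_val, mdata->base + SPI_CMD_REG);
        } else {
            reg_val &= ~SPI_CMD_PAUSE_EN;
            writel(reg_val, mdata->base + SPI_CMD_REG);
            mdata->state = MTK_SPI_IDLE;
            mtk_spi_reset(mdata);
        }
    }
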
485 struct mtk_spi *mdata = spi_master_get_devdata(master);
487 if (speed_hz < mdata->spi_clk_hz / 2)
488 div = DIV_ROUND_UP(mdata->spi_clk_hz, speed_hz);
494 if (mdata->dev_comp->enhance_timing) {
495 reg_val = readl(mdata->base + SPI_CFG2_REG);
502 writel(reg_val, mdata->base + SPI_CFG2_REG);
504 reg_val = readl(mdata->base + SPI_CFG0_REG);
510 writel(reg_val, mdata->base + SPI_CFG0_REG);
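
Lines 485-510 set the clock. The guard at 487 computes a divider only when the requested rate is below half the source clock; the else branch and the half-period split are inferred, not shown in the matches:

    if (speed_hz < mdata->spi_clk_hz / 2)
        div = DIV_ROUND_UP(mdata->spi_clk_hz, speed_hz);
    else
        div = 1;    /* already at or above half the source rate */

    /* split the divider into the two half-periods of SCK */
    sck_time = (div + 1) / 2;
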
517 struct mtk_spi *mdata = spi_master_get_devdata(master);
519 if (mdata->dev_comp->ipm_design)
521 mdata->xfer_len,
525 mdata->xfer_len,
528 packet_loop = mdata->xfer_len / packet_size;
530 reg_val = readl(mdata->base + SPI_CFG1_REG);
531 if (mdata->dev_comp->ipm_design)
538 writel(reg_val, mdata->base + SPI_CFG1_REG);
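
Lines 517-538 program the packet geometry: the chunk size is capped at a per-design maximum, the loop count is the quotient, and both land in SPI_CFG1_REG. A sketch of the size/loop computation, with the limit names (MTK_SPI_IPM_PACKET_SIZE, MTK_SPI_PACKET_SIZE) assumed:

    if (mdata->dev_comp->ipm_design)
        packet_size = min_t(u32, mdata->xfer_len,
                            MTK_SPI_IPM_PACKET_SIZE);
    else
        packet_size = min_t(u32, mdata->xfer_len,
                            MTK_SPI_PACKET_SIZE);

    /* the controller repeats a packet_size chunk packet_loop times */
    packet_loop = mdata->xfer_len / packet_size;
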
544 struct mtk_spi *mdata = spi_master_get_devdata(master);
546 cmd = readl(mdata->base + SPI_CMD_REG);
547 if (mdata->state == MTK_SPI_IDLE)
551 writel(cmd, mdata->base + SPI_CMD_REG);
554 static int mtk_spi_get_mult_delta(struct mtk_spi *mdata, u32 xfer_len)
558 if (mdata->dev_comp->ipm_design) {
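
Only the signature (554) and the ipm_design branch (558) of mtk_spi_get_mult_delta match. The function returns the trailing remainder that would break the packet-size multiple, which the caller at 572-593 then trims off the current chunk. A plausible completion, reusing the assumed packet-size constants:

    static int mtk_spi_get_mult_delta(struct mtk_spi *mdata, u32 xfer_len)
    {
        u32 mult_delta = 0;

        if (mdata->dev_comp->ipm_design) {
            if (xfer_len > MTK_SPI_IPM_PACKET_SIZE)
                mult_delta = xfer_len % MTK_SPI_IPM_PACKET_SIZE;
        } else {
            if (xfer_len > MTK_SPI_PACKET_SIZE)
                mult_delta = xfer_len % MTK_SPI_PACKET_SIZE;
        }

        return mult_delta;
    }
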
572 struct mtk_spi *mdata = spi_master_get_devdata(master);
574 if (mdata->tx_sgl_len && mdata->rx_sgl_len) {
575 if (mdata->tx_sgl_len > mdata->rx_sgl_len) {
576 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->rx_sgl_len);
577 mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
578 mdata->rx_sgl_len = mult_delta;
579 mdata->tx_sgl_len -= mdata->xfer_len;
581 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->tx_sgl_len);
582 mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
583 mdata->tx_sgl_len = mult_delta;
584 mdata->rx_sgl_len -= mdata->xfer_len;
586 } else if (mdata->tx_sgl_len) {
587 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->tx_sgl_len);
588 mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
589 mdata->tx_sgl_len = mult_delta;
590 } else if (mdata->rx_sgl_len) {
591 mult_delta = mtk_spi_get_mult_delta(mdata, mdata->rx_sgl_len);
592 mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
593 mdata->rx_sgl_len = mult_delta;
600 struct mtk_spi *mdata = spi_master_get_devdata(master);
602 if (mdata->tx_sgl) {
604 mdata->base + SPI_TX_SRC_REG);
606 if (mdata->dev_comp->dma_ext)
608 mdata->base + SPI_TX_SRC_REG_64);
612 if (mdata->rx_sgl) {
614 mdata->base + SPI_RX_DST_REG);
616 if (mdata->dev_comp->dma_ext)
618 mdata->base + SPI_RX_DST_REG_64);
629 struct mtk_spi *mdata = spi_master_get_devdata(master);
631 mdata->cur_transfer = xfer;
632 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, xfer->len);
633 mdata->num_xfered = 0;
639 iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt);
644 writel(reg_val, mdata->base + SPI_TX_DATA_REG);
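
Lines 629-644 are the FIFO path: whole 32-bit words go out with iowrite32_rep, and a 1-3 byte tail is packed into one last register write. A sketch of the tail handling the listing elides:

    cnt = mdata->xfer_len / 4;
    iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt);

    remainder = mdata->xfer_len % 4;
    if (remainder > 0) {
        reg_val = 0;
        memcpy(&reg_val, xfer->tx_buf + (cnt * 4), remainder);
        writel(reg_val, mdata->base + SPI_TX_DATA_REG);
    }
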
658 struct mtk_spi *mdata = spi_master_get_devdata(master);
660 mdata->tx_sgl = NULL;
661 mdata->rx_sgl = NULL;
662 mdata->tx_sgl_len = 0;
663 mdata->rx_sgl_len = 0;
664 mdata->cur_transfer = xfer;
665 mdata->num_xfered = 0;
669 cmd = readl(mdata->base + SPI_CMD_REG);
674 writel(cmd, mdata->base + SPI_CMD_REG);
677 mdata->tx_sgl = xfer->tx_sg.sgl;
679 mdata->rx_sgl = xfer->rx_sg.sgl;
681 if (mdata->tx_sgl) {
682 xfer->tx_dma = sg_dma_address(mdata->tx_sgl);
683 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
685 if (mdata->rx_sgl) {
686 xfer->rx_dma = sg_dma_address(mdata->rx_sgl);
687 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
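
In the DMA path (658-687), the SPI_CMD_REG read at 669 and write-back at 674 sit just before the scatterlist setup; plausibly they enable per-direction DMA, sketched here with assumed bit names (SPI_CMD_TX_DMA, SPI_CMD_RX_DMA):

    cmd = readl(mdata->base + SPI_CMD_REG);
    if (xfer->tx_buf)
        cmd |= SPI_CMD_TX_DMA;
    if (xfer->rx_buf)
        cmd |= SPI_CMD_RX_DMA;
    writel(cmd, mdata->base + SPI_CMD_REG);
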
702 struct mtk_spi *mdata = spi_master_get_devdata(spi->master);
706 if (mdata->dev_comp->ipm_design) {
712 writel(reg_val, mdata->base + SPI_CFG3_IPM_REG);
733 struct mtk_spi *mdata = spi_master_get_devdata(spi->master);
738 if (mdata->dev_comp->need_pad_sel && spi_get_csgpiod(spi, 0))
749 struct mtk_spi *mdata = spi_master_get_devdata(master);
750 struct spi_transfer *trans = mdata->cur_transfer;
752 reg_val = readl(mdata->base + SPI_STATUS0_REG);
754 mdata->state = MTK_SPI_PAUSED;
756 mdata->state = MTK_SPI_IDLE;
759 if (mdata->use_spimem) {
760 complete(&mdata->spimem_done);
766 cnt = mdata->xfer_len / 4;
767 ioread32_rep(mdata->base + SPI_RX_DATA_REG,
768 trans->rx_buf + mdata->num_xfered, cnt);
769 remainder = mdata->xfer_len % 4;
771 reg_val = readl(mdata->base + SPI_RX_DATA_REG);
773 mdata->num_xfered +
780 mdata->num_xfered += mdata->xfer_len;
781 if (mdata->num_xfered == trans->len) {
786 len = trans->len - mdata->num_xfered;
787 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, len);
791 cnt = mdata->xfer_len / 4;
792 iowrite32_rep(mdata->base + SPI_TX_DATA_REG,
793 trans->tx_buf + mdata->num_xfered, cnt);
795 remainder = mdata->xfer_len % 4;
799 trans->tx_buf + (cnt * 4) + mdata->num_xfered,
801 writel(reg_val, mdata->base + SPI_TX_DATA_REG);
810 if (mdata->tx_sgl)
811 trans->tx_dma += mdata->xfer_len;
812 if (mdata->rx_sgl)
813 trans->rx_dma += mdata->xfer_len;
815 if (mdata->tx_sgl && (mdata->tx_sgl_len == 0)) {
816 mdata->tx_sgl = sg_next(mdata->tx_sgl);
817 if (mdata->tx_sgl) {
818 trans->tx_dma = sg_dma_address(mdata->tx_sgl);
819 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
822 if (mdata->rx_sgl && (mdata->rx_sgl_len == 0)) {
823 mdata->rx_sgl = sg_next(mdata->rx_sgl);
824 if (mdata->rx_sgl) {
825 trans->rx_dma = sg_dma_address(mdata->rx_sgl);
826 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
830 if (!mdata->tx_sgl && !mdata->rx_sgl) {
832 cmd = readl(mdata->base + SPI_CMD_REG);
835 writel(cmd, mdata->base + SPI_CMD_REG);
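
Once both scatterlist chains are drained (830), the handler's final SPI_CMD_REG update at 832/835 presumably clears those same enables again; a sketch reusing the assumed bit names:

    if (!mdata->tx_sgl && !mdata->rx_sgl) {
        /* both chains consumed: switch the DMA enables back off */
        cmd = readl(mdata->base + SPI_CMD_REG);
        cmd &= ~SPI_CMD_TX_DMA;
        cmd &= ~SPI_CMD_RX_DMA;
        writel(cmd, mdata->base + SPI_CMD_REG);
    }
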
892 struct mtk_spi *mdata = spi_master_get_devdata(master);
894 writel((u32)(mdata->tx_dma & MTK_SPI_32BITS_MASK),
895 mdata->base + SPI_TX_SRC_REG);
897 if (mdata->dev_comp->dma_ext)
898 writel((u32)(mdata->tx_dma >> 32),
899 mdata->base + SPI_TX_SRC_REG_64);
903 writel((u32)(mdata->rx_dma & MTK_SPI_32BITS_MASK),
904 mdata->base + SPI_RX_DST_REG);
906 if (mdata->dev_comp->dma_ext)
907 writel((u32)(mdata->rx_dma >> 32),
908 mdata->base + SPI_RX_DST_REG_64);
916 struct mtk_spi *mdata = spi_master_get_devdata(mem->spi->master);
934 if (!wait_for_completion_timeout(&mdata->spimem_done,
936 dev_err(mdata->dev, "spi-mem transfer timeout\n");
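
Lines 916-936 block on the spimem_done completion that the interrupt handler signals at 760. A sketch of the surrounding check; the timeout constant is a placeholder of mine, the listing elides the real value:

    if (!wait_for_completion_timeout(&mdata->spimem_done,
                    msecs_to_jiffies(SPIMEM_TIMEOUT_MS /* assumed */))) {
        dev_err(mdata->dev, "spi-mem transfer timeout\n");
        return -ETIMEDOUT;
    }
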
946 struct mtk_spi *mdata = spi_master_get_devdata(mem->spi->master);
951 mdata->use_spimem = true;
952 reinit_completion(&mdata->spimem_done);
954 mtk_spi_reset(mdata);
958 reg_val = readl(mdata->base + SPI_CFG3_IPM_REG);
972 writel(0, mdata->base + SPI_CFG1_REG);
975 mdata->xfer_len = op->data.nbytes;
1005 writel(reg_val, mdata->base + SPI_CFG3_IPM_REG);
1015 mdata->use_spimem = false;
1039 mdata->tx_dma = dma_map_single(mdata->dev, tx_tmp_buf,
1041 if (dma_mapping_error(mdata->dev, mdata->tx_dma)) {
1058 mdata->rx_dma = dma_map_single(mdata->dev,
1062 if (dma_mapping_error(mdata->dev, mdata->rx_dma)) {
1068 reg_val = readl(mdata->base + SPI_CMD_REG);
1072 writel(reg_val, mdata->base + SPI_CMD_REG);
1084 reg_val = readl(mdata->base + SPI_CMD_REG);
1088 writel(reg_val, mdata->base + SPI_CMD_REG);
1092 dma_unmap_single(mdata->dev, mdata->rx_dma,
1102 dma_unmap_single(mdata->dev, mdata->tx_dma,
1106 mdata->use_spimem = false;
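
The exec_op path (946-1106) bounce-buffers the op and maps it for DMA: each dma_map_single at 1039/1058 is checked with dma_mapping_error and balanced by the dma_unmap_single at 1092/1102 with the same size and direction. The pairing in miniature (helper name, length and error handling are illustrative):

    static int mtk_spi_map_tx_sketch(struct mtk_spi *mdata,
                                     void *buf, size_t len)
    {
        mdata->tx_dma = dma_map_single(mdata->dev, buf, len,
                                       DMA_TO_DEVICE);
        if (dma_mapping_error(mdata->dev, mdata->tx_dma))
            return -ENOMEM;

        /* ... run the transfer ... */

        dma_unmap_single(mdata->dev, mdata->tx_dma, len, DMA_TO_DEVICE);
        return 0;
    }
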
1121 struct mtk_spi *mdata;
1124 master = devm_spi_alloc_master(dev, sizeof(*mdata));
1140 mdata = spi_master_get_devdata(master);
1141 mdata->dev_comp = device_get_match_data(dev);
1143 if (mdata->dev_comp->enhance_timing)
1146 if (mdata->dev_comp->must_tx)
1148 if (mdata->dev_comp->ipm_design)
1152 if (mdata->dev_comp->ipm_design) {
1153 mdata->dev = dev;
1155 init_completion(&mdata->spimem_done);
1158 if (mdata->dev_comp->need_pad_sel) {
1159 mdata->pad_num = of_property_count_u32_elems(dev->of_node,
1161 if (mdata->pad_num < 0)
1165 mdata->pad_sel = devm_kmalloc_array(dev, mdata->pad_num,
1167 if (!mdata->pad_sel)
1170 for (i = 0; i < mdata->pad_num; i++) {
1173 i, &mdata->pad_sel[i]);
1174 if (mdata->pad_sel[i] > MT8173_SPI_MAX_PAD_SEL)
1177 i, mdata->pad_sel[i]);
1182 mdata->base = devm_platform_ioremap_resource(pdev, 0);
1183 if (IS_ERR(mdata->base))
1184 return PTR_ERR(mdata->base);
1193 if (mdata->dev_comp->ipm_design)
1198 mdata->parent_clk = devm_clk_get(dev, "parent-clk");
1199 if (IS_ERR(mdata->parent_clk))
1200 return dev_err_probe(dev, PTR_ERR(mdata->parent_clk),
1203 mdata->sel_clk = devm_clk_get(dev, "sel-clk");
1204 if (IS_ERR(mdata->sel_clk))
1205 return dev_err_probe(dev, PTR_ERR(mdata->sel_clk), "failed to get sel-clk\n");
1207 mdata->spi_clk = devm_clk_get(dev, "spi-clk");
1208 if (IS_ERR(mdata->spi_clk))
1209 return dev_err_probe(dev, PTR_ERR(mdata->spi_clk), "failed to get spi-clk\n");
1211 mdata->spi_hclk = devm_clk_get_optional(dev, "hclk");
1212 if (IS_ERR(mdata->spi_hclk))
1213 return dev_err_probe(dev, PTR_ERR(mdata->spi_hclk), "failed to get hclk\n");
1215 ret = clk_set_parent(mdata->sel_clk, mdata->parent_clk);
1219 ret = clk_prepare_enable(mdata->spi_hclk);
1223 ret = clk_prepare_enable(mdata->spi_clk);
1225 clk_disable_unprepare(mdata->spi_hclk);
1229 mdata->spi_clk_hz = clk_get_rate(mdata->spi_clk);
1231 if (mdata->dev_comp->no_need_unprepare) {
1232 clk_disable(mdata->spi_clk);
1233 clk_disable(mdata->spi_hclk);
1235 clk_disable_unprepare(mdata->spi_clk);
1236 clk_disable_unprepare(mdata->spi_hclk);
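
The split at 1231-1236 pairs with the runtime-PM lines further down (1359-1400): clk_prepare()/clk_unprepare() may sleep, so on parts flagged no_need_unprepare the driver leaves the clocks prepared at probe and lets the PM callbacks toggle only the enable count, the cheap half of the clk API. Restated with that rationale as a comment:

    if (mdata->dev_comp->no_need_unprepare) {
        /* stay prepared across idle: runtime PM then only needs
         * clk_enable()/clk_disable(), which never sleep */
        clk_disable(mdata->spi_clk);
        clk_disable(mdata->spi_hclk);
    } else {
        clk_disable_unprepare(mdata->spi_clk);
        clk_disable_unprepare(mdata->spi_hclk);
    }
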
1239 if (mdata->dev_comp->need_pad_sel) {
1240 if (mdata->pad_num != master->num_chipselect)
1243 mdata->pad_num, master->num_chipselect);
1250 if (mdata->dev_comp->dma_ext)
1278 struct mtk_spi *mdata = spi_master_get_devdata(master);
1281 if (mdata->use_spimem && !completion_done(&mdata->spimem_done))
1282 complete(&mdata->spimem_done);
1293 mtk_spi_reset(mdata);
1295 if (mdata->dev_comp->no_need_unprepare) {
1296 clk_unprepare(mdata->spi_clk);
1297 clk_unprepare(mdata->spi_hclk);
1310 struct mtk_spi *mdata = spi_master_get_devdata(master);
1317 clk_disable_unprepare(mdata->spi_clk);
1318 clk_disable_unprepare(mdata->spi_hclk);
1328 struct mtk_spi *mdata = spi_master_get_devdata(master);
1331 ret = clk_prepare_enable(mdata->spi_clk);
1337 ret = clk_prepare_enable(mdata->spi_hclk);
1340 clk_disable_unprepare(mdata->spi_clk);
1347 clk_disable_unprepare(mdata->spi_clk);
1348 clk_disable_unprepare(mdata->spi_hclk);
1359 struct mtk_spi *mdata = spi_master_get_devdata(master);
1361 if (mdata->dev_comp->no_need_unprepare) {
1362 clk_disable(mdata->spi_clk);
1363 clk_disable(mdata->spi_hclk);
1365 clk_disable_unprepare(mdata->spi_clk);
1366 clk_disable_unprepare(mdata->spi_hclk);
1375 struct mtk_spi *mdata = spi_master_get_devdata(master);
1378 if (mdata->dev_comp->no_need_unprepare) {
1379 ret = clk_enable(mdata->spi_clk);
1384 ret = clk_enable(mdata->spi_hclk);
1387 clk_disable(mdata->spi_clk);
1391 ret = clk_prepare_enable(mdata->spi_clk);
1397 ret = clk_prepare_enable(mdata->spi_hclk);
1400 clk_disable_unprepare(mdata->spi_clk);