Lines matching the identifier `mdata` in the MediaTek SPI controller driver (drivers/spi/spi-mt65xx.c); each entry is prefixed with its line number in the source file.

181 static void mtk_spi_reset(struct mtk_spi *mdata)
186 reg_val = readl(mdata->base + SPI_CMD_REG);
188 writel(reg_val, mdata->base + SPI_CMD_REG);
190 reg_val = readl(mdata->base + SPI_CMD_REG);
192 writel(reg_val, mdata->base + SPI_CMD_REG);
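
File lines 181-192 are the body of mtk_spi_reset(): two read-modify-write passes over SPI_CMD_REG, first setting and then clearing the controller's soft-reset bit. Below is a minimal standalone sketch of that set-then-clear pulse; the register is modeled as a plain variable (the driver uses readl()/writel() on mdata->base), and the bit name SPI_CMD_RST is taken from the driver while its position here is illustrative.

```c
#include <stdint.h>
#include <stdio.h>

#define SPI_CMD_RST (1U << 2) /* name from the driver; position illustrative */

/* Simulated SPI_CMD_REG; the driver does readl()/writel() on
 * mdata->base + SPI_CMD_REG instead. */
static uint32_t spi_cmd_reg = 0x0000a5a0;

static void mtk_spi_reset_sketch(void)
{
	uint32_t reg_val;

	/* assert the soft reset ... */
	reg_val = spi_cmd_reg;
	reg_val |= SPI_CMD_RST;
	spi_cmd_reg = reg_val;

	/* ... then release it so the controller comes back up clean */
	reg_val = spi_cmd_reg;
	reg_val &= ~SPI_CMD_RST;
	spi_cmd_reg = reg_val;
}

int main(void)
{
	mtk_spi_reset_sketch();
	printf("SPI_CMD_REG after reset pulse: 0x%08x\n", (unsigned)spi_cmd_reg);
	return 0;
}
```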
202 struct mtk_spi *mdata = spi_master_get_devdata(master);
207 reg_val = readl(mdata->base + SPI_CMD_REG);
235 if (mdata->dev_comp->enhance_timing) {
257 writel(reg_val, mdata->base + SPI_CMD_REG);
260 if (mdata->dev_comp->need_pad_sel)
261 writel(mdata->pad_sel[spi->chip_select],
262 mdata->base + SPI_PAD_SEL_REG);
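
File lines 202-262 appear to be the per-message setup path: the CMD register is reconfigured for the device's SPI mode (with extra timing bits on enhance_timing parts), and on need_pad_sel parts the pad group for this chip select is written to SPI_PAD_SEL_REG. A hedged sketch of the mode-flag translation only; the SPI_CPHA/SPI_CPOL values mirror Linux's generic flags, and the CMD bit positions are assumptions.

```c
#include <stdint.h>
#include <stdio.h>

/* Generic mode flags, mirroring Linux's SPI_CPHA/SPI_CPOL */
#define SPI_CPHA 0x01
#define SPI_CPOL 0x02

/* Controller bits; names from the driver, positions illustrative */
#define SPI_CMD_CPHA (1U << 3)
#define SPI_CMD_CPOL (1U << 4)

/* Translate generic SPI mode flags into controller CMD bits,
 * clearing the old setting either way. */
static uint32_t prepare_message_sketch(uint32_t reg_val, unsigned int mode)
{
	if (mode & SPI_CPOL)
		reg_val |= SPI_CMD_CPOL;
	else
		reg_val &= ~SPI_CMD_CPOL;

	if (mode & SPI_CPHA)
		reg_val |= SPI_CMD_CPHA;
	else
		reg_val &= ~SPI_CMD_CPHA;

	return reg_val;
}

int main(void)
{
	uint32_t cmd = prepare_message_sketch(0, SPI_CPOL | SPI_CPHA);

	printf("CMD for SPI mode 3: 0x%08x\n", (unsigned)cmd);
	return 0;
}
```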
270 struct mtk_spi *mdata = spi_master_get_devdata(spi->master);
275 reg_val = readl(mdata->base + SPI_CMD_REG);
278 writel(reg_val, mdata->base + SPI_CMD_REG);
281 writel(reg_val, mdata->base + SPI_CMD_REG);
282 mdata->state = MTK_SPI_IDLE;
283 mtk_spi_reset(mdata);
291 struct mtk_spi *mdata = spi_master_get_devdata(master);
293 spi_clk_hz = clk_get_rate(mdata->spi_clk);
302 if (mdata->dev_comp->enhance_timing) {
307 writel(reg_val, mdata->base + SPI_CFG2_REG);
312 writel(reg_val, mdata->base + SPI_CFG0_REG);
319 writel(reg_val, mdata->base + SPI_CFG0_REG);
322 reg_val = readl(mdata->base + SPI_CFG1_REG);
325 writel(reg_val, mdata->base + SPI_CFG1_REG);
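
File lines 291-325 derive the SCK and chip-select timing from the rate clk_get_rate() reports for mdata->spi_clk, then program SPI_CFG0/CFG1 (or SPI_CFG2 on enhance_timing parts). The divider math below is my reading of this driver's prepare-transfer path and the register packing is omitted, so treat it as illustrative.

```c
#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* Sketch of the SCK/CS timing derivation, under the assumption that
 * the controller divides the parent clock and needs "ticks per half
 * period" values for the CFG registers. */
static void prepare_transfer_sketch(unsigned spi_clk_hz, unsigned speed_hz)
{
	unsigned div, sck_time, cs_time;

	if (speed_hz < spi_clk_hz / 2)
		div = DIV_ROUND_UP(spi_clk_hz, speed_hz);
	else
		div = 1; /* can't run faster than parent/2 */

	sck_time = (div + 1) / 2; /* high/low time per SCK phase */
	cs_time = sck_time * 2;   /* CS setup/hold, in SCK phases */

	printf("parent %u Hz, want %u Hz -> div %u, sck_time %u, cs_time %u\n",
	       spi_clk_hz, speed_hz, div, sck_time, cs_time);
}

int main(void)
{
	prepare_transfer_sketch(109200000, 26000000);
	return 0;
}
```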
331 struct mtk_spi *mdata = spi_master_get_devdata(master);
333 packet_size = min_t(u32, mdata->xfer_len, MTK_SPI_PACKET_SIZE);
334 packet_loop = mdata->xfer_len / packet_size;
336 reg_val = readl(mdata->base + SPI_CFG1_REG);
340 writel(reg_val, mdata->base + SPI_CFG1_REG);
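
File lines 331-340 split the current chunk into hardware packets: packet_size is capped at MTK_SPI_PACKET_SIZE (1024 bytes) and packet_loop is the number of packets, both programmed into SPI_CFG1 as "value minus one" fields. Note that xfer_len arrives pre-trimmed by the mult_delta logic further down, so the division is exact. A standalone sketch; the field offsets and masks follow the driver's SPI_CFG1 layout but are illustrative here.

```c
#include <stdint.h>
#include <stdio.h>

#define MTK_SPI_PACKET_SIZE 1024 /* max bytes per hardware packet */

/* Offsets/masks modeled on the driver's SPI_CFG1 fields; illustrative */
#define PACKET_LOOP_OFFSET   8
#define PACKET_LENGTH_OFFSET 16
#define PACKET_LOOP_MASK     (0xffU << PACKET_LOOP_OFFSET)
#define PACKET_LENGTH_MASK   (0x3ffU << PACKET_LENGTH_OFFSET)

static uint32_t setup_packet_sketch(uint32_t cfg1, uint32_t xfer_len)
{
	uint32_t packet_size, packet_loop;

	/* split into <=1024-byte packets; xfer_len is already a
	 * multiple of the packet size (or smaller than one packet) */
	packet_size = xfer_len < MTK_SPI_PACKET_SIZE ? xfer_len : MTK_SPI_PACKET_SIZE;
	packet_loop = xfer_len / packet_size;

	/* program "length - 1" and "loop - 1" into CFG1 */
	cfg1 &= ~(PACKET_LENGTH_MASK | PACKET_LOOP_MASK);
	cfg1 |= (packet_size - 1) << PACKET_LENGTH_OFFSET;
	cfg1 |= (packet_loop - 1) << PACKET_LOOP_OFFSET;
	return cfg1;
}

int main(void)
{
	/* 2048 bytes -> two 1024-byte packets: CFG1 = 0x03ff0100 */
	printf("CFG1 for 2048 bytes: 0x%08x\n",
	       (unsigned)setup_packet_sketch(0, 2048));
	return 0;
}
```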
346 struct mtk_spi *mdata = spi_master_get_devdata(master);
348 cmd = readl(mdata->base + SPI_CMD_REG);
349 if (mdata->state == MTK_SPI_IDLE)
353 writel(cmd, mdata->base + SPI_CMD_REG);
371 struct mtk_spi *mdata = spi_master_get_devdata(master);
373 if (mdata->tx_sgl_len && mdata->rx_sgl_len) {
374 if (mdata->tx_sgl_len > mdata->rx_sgl_len) {
375 mult_delta = mtk_spi_get_mult_delta(mdata->rx_sgl_len);
376 mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
377 mdata->rx_sgl_len = mult_delta;
378 mdata->tx_sgl_len -= mdata->xfer_len;
380 mult_delta = mtk_spi_get_mult_delta(mdata->tx_sgl_len);
381 mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
382 mdata->tx_sgl_len = mult_delta;
383 mdata->rx_sgl_len -= mdata->xfer_len;
385 } else if (mdata->tx_sgl_len) {
386 mult_delta = mtk_spi_get_mult_delta(mdata->tx_sgl_len);
387 mdata->xfer_len = mdata->tx_sgl_len - mult_delta;
388 mdata->tx_sgl_len = mult_delta;
389 } else if (mdata->rx_sgl_len) {
390 mult_delta = mtk_spi_get_mult_delta(mdata->rx_sgl_len);
391 mdata->xfer_len = mdata->rx_sgl_len - mult_delta;
392 mdata->rx_sgl_len = mult_delta;
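
File lines 371-392 are the DMA length-splitting logic: each round transfers the shorter pending side, rounded down (via mtk_spi_get_mult_delta()) to a whole number of 1024-byte packets, and the sub-packet remainder stays behind for the next round. A standalone rendering of exactly that branching, with the scatterlist lengths as plain counters:

```c
#include <stdint.h>
#include <stdio.h>

#define MTK_SPI_PACKET_SIZE 1024

/* Remainder that would not fill a whole packet; deferred so every
 * programmed chunk is packet-aligned (mtk_spi_get_mult_delta()). */
static uint32_t get_mult_delta(uint32_t xfer_len)
{
	return xfer_len > MTK_SPI_PACKET_SIZE ? xfer_len % MTK_SPI_PACKET_SIZE : 0;
}

static void update_len_sketch(uint32_t *tx_len, uint32_t *rx_len,
			      uint32_t *xfer_len)
{
	uint32_t mult_delta;

	if (*tx_len && *rx_len) {
		/* full duplex: bounded by the shorter side */
		if (*tx_len > *rx_len) {
			mult_delta = get_mult_delta(*rx_len);
			*xfer_len = *rx_len - mult_delta;
			*rx_len = mult_delta;
			*tx_len -= *xfer_len;
		} else {
			mult_delta = get_mult_delta(*tx_len);
			*xfer_len = *tx_len - mult_delta;
			*tx_len = mult_delta;
			*rx_len -= *xfer_len;
		}
	} else if (*tx_len) {
		mult_delta = get_mult_delta(*tx_len);
		*xfer_len = *tx_len - mult_delta;
		*tx_len = mult_delta;
	} else if (*rx_len) {
		mult_delta = get_mult_delta(*rx_len);
		*xfer_len = *rx_len - mult_delta;
		*rx_len = mult_delta;
	}
}

int main(void)
{
	uint32_t tx = 5000, rx = 3000, xfer = 0;

	update_len_sketch(&tx, &rx, &xfer);
	/* xfer 2048 (two packets), rx keeps its 952-byte tail, tx 2952 */
	printf("xfer %u, tx left %u, rx left %u\n",
	       (unsigned)xfer, (unsigned)tx, (unsigned)rx);
	return 0;
}
```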
399 struct mtk_spi *mdata = spi_master_get_devdata(master);
401 if (mdata->tx_sgl) {
403 mdata->base + SPI_TX_SRC_REG);
405 if (mdata->dev_comp->dma_ext)
407 mdata->base + SPI_TX_SRC_REG_64);
411 if (mdata->rx_sgl) {
413 mdata->base + SPI_RX_DST_REG);
415 if (mdata->dev_comp->dma_ext)
417 mdata->base + SPI_RX_DST_REG_64);
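
File lines 399-417 program the DMA source and destination addresses: the low 32 bits always go to SPI_TX_SRC_REG/SPI_RX_DST_REG, and on parts with the dma_ext capability the high 32 bits go to the companion _64 registers, enabling buffers above 4 GiB. A minimal sketch of that address split, with the registers simulated as variables:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Simulated address registers; the driver writes these with writel()
 * at SPI_TX_SRC_REG and SPI_TX_SRC_REG_64. */
static uint32_t tx_src_reg, tx_src_reg_64;

static void setup_dma_addr_sketch(uint64_t tx_dma, bool dma_ext)
{
	tx_src_reg = (uint32_t)tx_dma; /* low 32 bits, always */
	if (dma_ext)                   /* high 32 bits, only if supported */
		tx_src_reg_64 = (uint32_t)(tx_dma >> 32);
}

int main(void)
{
	setup_dma_addr_sketch(0x123456789aULL, true);
	printf("SRC 0x%08x, SRC_64 0x%08x\n",
	       (unsigned)tx_src_reg, (unsigned)tx_src_reg_64);
	return 0;
}
```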
428 struct mtk_spi *mdata = spi_master_get_devdata(master);
430 mdata->cur_transfer = xfer;
431 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, xfer->len);
432 mdata->num_xfered = 0;
438 iowrite32_rep(mdata->base + SPI_TX_DATA_REG, xfer->tx_buf, cnt);
443 writel(reg_val, mdata->base + SPI_TX_DATA_REG);
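
File lines 428-443 are the FIFO TX path: cap the chunk at the 32-byte FIFO, push whole 32-bit words with iowrite32_rep(), then pack any 1-3 trailing bytes into one final zero-padded word via memcpy() and a single writel(). A runnable sketch of that word-plus-tail copy, with the FIFO port replaced by a print stub:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define MTK_SPI_MAX_FIFO_SIZE 32 /* bytes, from the driver */

/* Stand-in for SPI_TX_DATA_REG: every 32-bit store pushes 4 bytes
 * into the hardware FIFO. */
static void fifo_push(uint32_t word)
{
	printf("TX word: 0x%08x\n", (unsigned)word);
}

static void fifo_fill_sketch(const uint8_t *buf, uint32_t len)
{
	uint32_t xfer_len = len < MTK_SPI_MAX_FIFO_SIZE ? len : MTK_SPI_MAX_FIFO_SIZE;
	uint32_t cnt = xfer_len / 4;       /* whole 32-bit words */
	uint32_t remainder = xfer_len % 4; /* 1-3 trailing bytes */
	uint32_t reg_val;

	for (uint32_t i = 0; i < cnt; i++) {
		memcpy(&reg_val, buf + i * 4, 4);
		fifo_push(reg_val); /* iowrite32_rep() does this in bulk */
	}

	if (remainder > 0) {
		reg_val = 0;
		memcpy(&reg_val, buf + cnt * 4, remainder);
		fifo_push(reg_val); /* tail word, zero padded */
	}
}

int main(void)
{
	const uint8_t msg[] = "hello, fifo";

	fifo_fill_sketch(msg, sizeof(msg) - 1); /* 11 bytes: 2 words + 3 */
	return 0;
}
```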
457 struct mtk_spi *mdata = spi_master_get_devdata(master);
459 mdata->tx_sgl = NULL;
460 mdata->rx_sgl = NULL;
461 mdata->tx_sgl_len = 0;
462 mdata->rx_sgl_len = 0;
463 mdata->cur_transfer = xfer;
464 mdata->num_xfered = 0;
468 cmd = readl(mdata->base + SPI_CMD_REG);
473 writel(cmd, mdata->base + SPI_CMD_REG);
476 mdata->tx_sgl = xfer->tx_sg.sgl;
478 mdata->rx_sgl = xfer->rx_sg.sgl;
480 if (mdata->tx_sgl) {
481 xfer->tx_dma = sg_dma_address(mdata->tx_sgl);
482 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
484 if (mdata->rx_sgl) {
485 xfer->rx_dma = sg_dma_address(mdata->rx_sgl);
486 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
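
File lines 457-486 start a DMA transfer: the scatter-gather bookkeeping is cleared, the TX/RX DMA bits in SPI_CMD_REG are enabled only for the directions that actually have buffers, and the first segment's DMA address and length are latched. A small sketch of the state reset and the conditional enable; the bit names follow the driver's SPI_CMD_TX_DMA/SPI_CMD_RX_DMA but the positions are illustrative.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Names from the driver; bit positions illustrative */
#define SPI_CMD_TX_DMA (1U << 5)
#define SPI_CMD_RX_DMA (1U << 6)

struct xfer_state {
	uint32_t tx_sgl_len, rx_sgl_len, num_xfered;
};

/* Reset per-transfer bookkeeping, then enable DMA only for the
 * directions that have a scatterlist to work through. */
static uint32_t dma_transfer_sketch(struct xfer_state *st, uint32_t cmd,
				    bool have_tx, bool have_rx)
{
	st->tx_sgl_len = 0;
	st->rx_sgl_len = 0;
	st->num_xfered = 0;

	if (have_tx)
		cmd |= SPI_CMD_TX_DMA;
	if (have_rx)
		cmd |= SPI_CMD_RX_DMA;
	return cmd;
}

int main(void)
{
	struct xfer_state st;
	uint32_t cmd = dma_transfer_sketch(&st, 0, true, false);

	printf("CMD for TX-only DMA: 0x%08x\n", (unsigned)cmd);
	return 0;
}
```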
519 struct mtk_spi *mdata = spi_master_get_devdata(spi->master);
524 if (mdata->dev_comp->need_pad_sel && gpio_is_valid(spi->cs_gpio))
534 struct mtk_spi *mdata = spi_master_get_devdata(master);
535 struct spi_transfer *trans = mdata->cur_transfer;
537 reg_val = readl(mdata->base + SPI_STATUS0_REG);
539 mdata->state = MTK_SPI_PAUSED;
541 mdata->state = MTK_SPI_IDLE;
545 cnt = mdata->xfer_len / 4;
546 ioread32_rep(mdata->base + SPI_RX_DATA_REG,
547 trans->rx_buf + mdata->num_xfered, cnt);
548 remainder = mdata->xfer_len % 4;
550 reg_val = readl(mdata->base + SPI_RX_DATA_REG);
552 mdata->num_xfered +
559 mdata->num_xfered += mdata->xfer_len;
560 if (mdata->num_xfered == trans->len) {
565 len = trans->len - mdata->num_xfered;
566 mdata->xfer_len = min(MTK_SPI_MAX_FIFO_SIZE, len);
569 cnt = mdata->xfer_len / 4;
570 iowrite32_rep(mdata->base + SPI_TX_DATA_REG,
571 trans->tx_buf + mdata->num_xfered, cnt);
573 remainder = mdata->xfer_len % 4;
577 trans->tx_buf + (cnt * 4) + mdata->num_xfered,
579 writel(reg_val, mdata->base + SPI_TX_DATA_REG);
587 if (mdata->tx_sgl)
588 trans->tx_dma += mdata->xfer_len;
589 if (mdata->rx_sgl)
590 trans->rx_dma += mdata->xfer_len;
592 if (mdata->tx_sgl && (mdata->tx_sgl_len == 0)) {
593 mdata->tx_sgl = sg_next(mdata->tx_sgl);
594 if (mdata->tx_sgl) {
595 trans->tx_dma = sg_dma_address(mdata->tx_sgl);
596 mdata->tx_sgl_len = sg_dma_len(mdata->tx_sgl);
599 if (mdata->rx_sgl && (mdata->rx_sgl_len == 0)) {
600 mdata->rx_sgl = sg_next(mdata->rx_sgl);
601 if (mdata->rx_sgl) {
602 trans->rx_dma = sg_dma_address(mdata->rx_sgl);
603 mdata->rx_sgl_len = sg_dma_len(mdata->rx_sgl);
607 if (!mdata->tx_sgl && !mdata->rx_sgl) {
609 cmd = readl(mdata->base + SPI_CMD_REG);
612 writel(cmd, mdata->base + SPI_CMD_REG);
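
File lines 534-612 are the completion interrupt. In FIFO mode it drains whole RX words plus a memcpy'd tail and refills TX for the next chunk; in DMA mode it advances trans->tx_dma/rx_dma by the completed chunk, hops to sg_next() when the current segment is exhausted, and clears the DMA enables in SPI_CMD_REG once both lists are done. Below is a simplified standalone walk of the scatterlist-advance part, using fixed 1024-byte chunks instead of the driver's mult_delta split, with a plain struct standing in for sg_dma_address()/sg_dma_len():

```c
#include <stdint.h>
#include <stdio.h>

/* Minimal stand-in for one DMA scatterlist entry */
struct seg { uint64_t addr; uint32_t len; };

/* Consume a chunk per "interrupt", advance within the current entry,
 * and start the next entry when this one is exhausted. */
static void advance_sketch(const struct seg *sgl, unsigned int nents)
{
	unsigned int i = 0;
	uint64_t dma = sgl[0].addr;
	uint32_t sgl_len = sgl[0].len;

	while (i < nents) {
		/* pretend the controller just completed a chunk */
		uint32_t xfer_len = sgl_len < 1024 ? sgl_len : 1024;

		dma += xfer_len; /* trans->tx_dma/rx_dma advance */
		sgl_len -= xfer_len;

		if (sgl_len == 0) { /* sg_next(): move to the next entry */
			if (++i == nents)
				break;
			dma = sgl[i].addr;
			sgl_len = sgl[i].len;
		}
		printf("next chunk at 0x%llx, %u left in entry %u\n",
		       (unsigned long long)dma, (unsigned)sgl_len, i);
	}
	printf("all entries consumed; driver would clear the DMA bits\n");
}

int main(void)
{
	const struct seg sgl[] = {
		{ 0x80000000ULL, 2048 },
		{ 0x80100000ULL, 512 },
	};

	advance_sketch(sgl, 2);
	return 0;
}
```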
629 struct mtk_spi *mdata;
633 master = spi_alloc_master(&pdev->dev, sizeof(*mdata));
656 mdata = spi_master_get_devdata(master);
657 mdata->dev_comp = of_id->data;
659 if (mdata->dev_comp->enhance_timing)
662 if (mdata->dev_comp->must_tx)
665 if (mdata->dev_comp->need_pad_sel) {
666 mdata->pad_num = of_property_count_u32_elems(
669 if (mdata->pad_num < 0) {
676 mdata->pad_sel = devm_kmalloc_array(&pdev->dev, mdata->pad_num,
678 if (!mdata->pad_sel) {
683 for (i = 0; i < mdata->pad_num; i++) {
686 i, &mdata->pad_sel[i]);
687 if (mdata->pad_sel[i] > MT8173_SPI_MAX_PAD_SEL) {
689 i, mdata->pad_sel[i]);
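
File lines 665-689 of probe handle need_pad_sel parts: the pad-select u32 array is counted and read from the device tree (via of_property_count_u32_elems() and per-index reads), and probe fails if any entry exceeds MT8173_SPI_MAX_PAD_SEL. A runnable sketch of just the validation loop; the array is passed in directly instead of being read from a DT property.

```c
#include <stdio.h>

#define MT8173_SPI_MAX_PAD_SEL 3 /* value from the driver */

/* Validate pad selections the way probe does; returns -1 where the
 * driver would bail out of probe with an error. */
static int validate_pad_sel(const unsigned *pad_sel, int pad_num)
{
	for (int i = 0; i < pad_num; i++) {
		if (pad_sel[i] > MT8173_SPI_MAX_PAD_SEL) {
			fprintf(stderr, "wrong pad-sel[%d]: %u\n",
				i, pad_sel[i]);
			return -1;
		}
	}
	return 0;
}

int main(void)
{
	const unsigned ok[] = { 0, 1 }, bad[] = { 0, 7 };

	printf("ok:  %d\n", validate_pad_sel(ok, 2));
	printf("bad: %d\n", validate_pad_sel(bad, 2));
	return 0;
}
```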
697 mdata->base = devm_platform_ioremap_resource(pdev, 0);
698 if (IS_ERR(mdata->base)) {
699 ret = PTR_ERR(mdata->base);
719 mdata->parent_clk = devm_clk_get(&pdev->dev, "parent-clk");
720 if (IS_ERR(mdata->parent_clk)) {
721 ret = PTR_ERR(mdata->parent_clk);
726 mdata->sel_clk = devm_clk_get(&pdev->dev, "sel-clk");
727 if (IS_ERR(mdata->sel_clk)) {
728 ret = PTR_ERR(mdata->sel_clk);
733 mdata->spi_clk = devm_clk_get(&pdev->dev, "spi-clk");
734 if (IS_ERR(mdata->spi_clk)) {
735 ret = PTR_ERR(mdata->spi_clk);
740 ret = clk_prepare_enable(mdata->spi_clk);
746 ret = clk_set_parent(mdata->sel_clk, mdata->parent_clk);
749 clk_disable_unprepare(mdata->spi_clk);
753 clk_disable_unprepare(mdata->spi_clk);
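
File lines 719-753 acquire the three clocks ("parent-clk", "sel-clk", "spi-clk"), enable spi-clk, and reparent sel-clk onto parent-clk; note the unwind at lines 749 and 753, where the already-enabled spi-clk is disabled again if a later step fails. A runnable sketch of that acquire/enable/unwind ordering with print stubs in place of the real clk API (the stubs are mine, not kernel functions):

```c
#include <stdio.h>

/* Stubs standing in for clk_prepare_enable()/clk_set_parent()/
 * clk_disable_unprepare(); the point is the unwind order. */
static int clk_prepare_enable_stub(const char *name)
{
	printf("enable %s\n", name);
	return 0;
}

static void clk_disable_unprepare_stub(const char *name)
{
	printf("disable %s\n", name);
}

static int clk_set_parent_stub(const char *child, const char *parent)
{
	printf("reparent %s -> %s\n", child, parent);
	return -1; /* force the error path for the demo */
}

static int probe_clocks_sketch(void)
{
	int ret;

	ret = clk_prepare_enable_stub("spi-clk");
	if (ret < 0)
		return ret;

	/* on failure, the already-enabled spi-clk must be released
	 * again before probe returns, as the driver does */
	ret = clk_set_parent_stub("sel-clk", "parent-clk");
	if (ret < 0) {
		clk_disable_unprepare_stub("spi-clk");
		return ret;
	}
	return 0;
}

int main(void)
{
	printf("probe: %d\n", probe_clocks_sketch());
	return 0;
}
```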
763 if (mdata->dev_comp->need_pad_sel) {
764 if (mdata->pad_num != master->num_chipselect) {
767 mdata->pad_num, master->num_chipselect);
793 if (mdata->dev_comp->dma_ext)
815 struct mtk_spi *mdata = spi_master_get_devdata(master);
819 mtk_spi_reset(mdata);
829 struct mtk_spi *mdata = spi_master_get_devdata(master);
836 clk_disable_unprepare(mdata->spi_clk);
845 struct mtk_spi *mdata = spi_master_get_devdata(master);
848 ret = clk_prepare_enable(mdata->spi_clk);
857 clk_disable_unprepare(mdata->spi_clk);
867 struct mtk_spi *mdata = spi_master_get_devdata(master);
869 clk_disable_unprepare(mdata->spi_clk);
877 struct mtk_spi *mdata = spi_master_get_devdata(master);
880 ret = clk_prepare_enable(mdata->spi_clk);