Lines matching refs: ctlr — references to the identifier ctlr in the Linux SPI driver drivers/spi/spi-bcm2835.c; the leading number on each line is the line number in that file.

89  * @ctlr: SPI controller reverse lookup
123 struct spi_controller *ctlr;
400 spi_finalize_current_transfer(bs->ctlr);
406 static int bcm2835_spi_transfer_one_irq(struct spi_controller *ctlr,
411 struct bcm2835_spi *bs = spi_controller_get_devdata(ctlr);
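The entries above show the driver's central idiom: the private state struct keeps a back-pointer to its spi_controller (the "reverse lookup" documented at line 89), while the controller carries the private struct as devdata, so each side can reach the other from IRQ or DMA context. A minimal sketch of that pairing; the names my_spi, my_spi_interrupt and the trimmed signature of the transfer hook are invented for illustration:

#include <linux/interrupt.h>
#include <linux/spi/spi.h>

/* Private state: assumed layout, trimmed to the fields under discussion. */
struct my_spi {
        struct spi_controller *ctlr;    /* reverse lookup for IRQ/DMA paths */
        void __iomem *regs;
};

static irqreturn_t my_spi_interrupt(int irq, void *dev_id)
{
        struct my_spi *bs = dev_id;

        /* ... drain the FIFOs, detect end of transfer ... */

        /* tell the SPI core the current transfer is done */
        spi_finalize_current_transfer(bs->ctlr);
        return IRQ_HANDLED;
}

static int my_spi_transfer_one_irq(struct spi_controller *ctlr,
                                   struct spi_device *spi,
                                   struct spi_transfer *tfr)
{
        struct my_spi *bs = spi_controller_get_devdata(ctlr);

        /* ... program bs->regs and unmask the interrupt ... */

        return 1;       /* positive: completion arrives asynchronously */
}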
437 * @ctlr: SPI host controller
481 static void bcm2835_spi_transfer_prologue(struct spi_controller *ctlr,
526 dma_sync_single_for_device(ctlr->dma_rx->device->dev,
605 struct spi_controller *ctlr = data;
606 struct bcm2835_spi *bs = spi_controller_get_devdata(ctlr);
613 dmaengine_terminate_async(ctlr->dma_tx);
622 spi_finalize_current_transfer(ctlr);
633 struct spi_controller *ctlr = data;
634 struct bcm2835_spi *bs = spi_controller_get_devdata(ctlr);
649 dmaengine_terminate_async(ctlr->dma_rx);
653 spi_finalize_current_transfer(ctlr);
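Lines 605–653 are the two DMA completion callbacks. Each receives the spi_controller through callback_param (wired up at lines 706/709 below), quiesces the opposite channel with dmaengine_terminate_async(), and finalizes the transfer. One direction sketched under those assumptions; the other is symmetric:

#include <linux/dmaengine.h>
#include <linux/spi/spi.h>

/* TX-done shape: the RX channel may still be cycling a dummy
 * "clear RX" descriptor, so stop it before waking the SPI core. */
static void my_spi_dma_done(void *data)
{
        struct spi_controller *ctlr = data;

        dmaengine_terminate_async(ctlr->dma_rx);

        spi_finalize_current_transfer(ctlr);
}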
658 * @ctlr: SPI host controller
667 static int bcm2835_spi_prepare_sg(struct spi_controller *ctlr,
684 chan = ctlr->dma_tx;
690 chan = ctlr->dma_rx;
706 desc->callback_param = ctlr;
709 desc->callback_param = ctlr;
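bcm2835_spi_prepare_sg() (lines 667–709) maps one direction of a transfer onto a dmaengine descriptor and stores ctlr in callback_param so the callback above can finalize. A hedged sketch of that shape; the exact descriptor flags and per-direction details of the real driver are simplified:

#include <linux/dmaengine.h>
#include <linux/spi/spi.h>

static void my_spi_dma_done(void *data);        /* completion sketch above */

static int my_spi_prepare_sg(struct spi_controller *ctlr,
                             struct spi_transfer *tfr, bool is_tx)
{
        struct dma_chan *chan = is_tx ? ctlr->dma_tx : ctlr->dma_rx;
        struct sg_table *sgt = is_tx ? &tfr->tx_sg : &tfr->rx_sg;
        enum dma_transfer_direction dir = is_tx ? DMA_MEM_TO_DEV
                                                : DMA_DEV_TO_MEM;
        struct dma_async_tx_descriptor *desc;

        /* the SPI core has already DMA-mapped the buffers into *sgt */
        desc = dmaengine_prep_slave_sg(chan, sgt->sgl, sgt->nents, dir,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
                return -EINVAL;

        /* the callback must be able to finalize, so hand it the ctlr */
        desc->callback = my_spi_dma_done;
        desc->callback_param = ctlr;

        return dma_submit_error(dmaengine_submit(desc));
}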
721 * @ctlr: SPI host controller
766 static int bcm2835_spi_transfer_one_dma(struct spi_controller *ctlr,
771 struct bcm2835_spi *bs = spi_controller_get_devdata(ctlr);
782 bcm2835_spi_transfer_prologue(ctlr, tfr, bs, cs);
786 ret = bcm2835_spi_prepare_sg(ctlr, tfr, bs, target, true);
805 dma_async_issue_pending(ctlr->dma_tx);
812 ret = bcm2835_spi_prepare_sg(ctlr, tfr, bs, target, false);
819 dmaengine_terminate_sync(ctlr->dma_tx);
825 dma_async_issue_pending(ctlr->dma_rx);
835 dmaengine_terminate_async(ctlr->dma_rx);
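Lines 766–835 give the submit order of the DMA path: TX is prepared and issued first so data starts flowing while RX is still being armed, and a half-failed setup tears the already-running TX back down. A sketch reusing my_spi_prepare_sg() from above, with the prologue handling (line 782) elided:

static int my_spi_transfer_one_dma(struct spi_controller *ctlr,
                                   struct spi_transfer *tfr)
{
        int ret;

        ret = my_spi_prepare_sg(ctlr, tfr, true);       /* TX */
        if (ret)
                return ret;
        dma_async_issue_pending(ctlr->dma_tx);

        ret = my_spi_prepare_sg(ctlr, tfr, false);      /* RX */
        if (ret) {
                /* TX is already running; stop it synchronously */
                dmaengine_terminate_sync(ctlr->dma_tx);
                return ret;
        }
        dma_async_issue_pending(ctlr->dma_rx);

        return 1;       /* in flight; a DMA callback finalizes it */
}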
848 static bool bcm2835_spi_can_dma(struct spi_controller *ctlr,
860 static void bcm2835_dma_release(struct spi_controller *ctlr,
863 if (ctlr->dma_tx) {
864 dmaengine_terminate_sync(ctlr->dma_tx);
870 dma_unmap_page_attrs(ctlr->dma_tx->device->dev,
875 dma_release_channel(ctlr->dma_tx);
876 ctlr->dma_tx = NULL;
879 if (ctlr->dma_rx) {
880 dmaengine_terminate_sync(ctlr->dma_rx);
881 dma_release_channel(ctlr->dma_rx);
882 ctlr->dma_rx = NULL;
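Teardown (lines 860–882) quiesces each channel before releasing it and NULLs the pointer, so ctlr->dma_* doubles as a "DMA available" flag in the paths above; the same terminate-sync quiescing reappears in the handle_err hook further down. A sketch, with the unmap of the driver-private zero-fill page (line 870) elided because that buffer is not part of this sketch:

#include <linux/dmaengine.h>
#include <linux/spi/spi.h>

static void my_dma_release(struct spi_controller *ctlr)
{
        if (ctlr->dma_tx) {
                dmaengine_terminate_sync(ctlr->dma_tx);
                dma_release_channel(ctlr->dma_tx);
                ctlr->dma_tx = NULL;
        }
        if (ctlr->dma_rx) {
                dmaengine_terminate_sync(ctlr->dma_rx);
                dma_release_channel(ctlr->dma_rx);
                ctlr->dma_rx = NULL;
        }
}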
886 static int bcm2835_dma_init(struct spi_controller *ctlr, struct device *dev,
895 addr = of_get_address(ctlr->dev.of_node, 0, NULL, NULL);
904 ctlr->dma_tx = dma_request_chan(dev, "tx");
905 if (IS_ERR(ctlr->dma_tx)) {
906 ret = dev_err_probe(dev, PTR_ERR(ctlr->dma_tx),
908 ctlr->dma_tx = NULL;
911 ctlr->dma_rx = dma_request_chan(dev, "rx");
912 if (IS_ERR(ctlr->dma_rx)) {
913 ret = dev_err_probe(dev, PTR_ERR(ctlr->dma_rx),
915 ctlr->dma_rx = NULL;
927 ret = dmaengine_slave_config(ctlr->dma_tx, &slave_config);
931 bs->fill_tx_addr = dma_map_page_attrs(ctlr->dma_tx->device->dev,
935 if (dma_mapping_error(ctlr->dma_tx->device->dev, bs->fill_tx_addr)) {
942 bs->fill_tx_desc = dmaengine_prep_dma_cyclic(ctlr->dma_tx,
968 ret = dmaengine_slave_config(ctlr->dma_rx, &slave_config);
973 ctlr->can_dma = bcm2835_spi_can_dma;
981 bcm2835_dma_release(ctlr, bs);
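Channel acquisition (lines 886–981) clears the channel pointer on failure before unwinding, which keeps the release helper safe to call unconditionally, and only sets ctlr->can_dma once DMA is actually usable. A sketch under those assumptions; slave_config and the cyclic zero-fill descriptor are elided, and my_spi_can_dma() is a hypothetical predicate:

#include <linux/dmaengine.h>
#include <linux/spi/spi.h>

static bool my_spi_can_dma(struct spi_controller *ctlr,
                           struct spi_device *spi, struct spi_transfer *tfr);

static int my_dma_init(struct spi_controller *ctlr, struct device *dev)
{
        int ret;

        ctlr->dma_tx = dma_request_chan(dev, "tx");
        if (IS_ERR(ctlr->dma_tx)) {
                ret = dev_err_probe(dev, PTR_ERR(ctlr->dma_tx),
                                    "no tx-dma configuration found\n");
                ctlr->dma_tx = NULL;
                goto err;
        }

        ctlr->dma_rx = dma_request_chan(dev, "rx");
        if (IS_ERR(ctlr->dma_rx)) {
                ret = dev_err_probe(dev, PTR_ERR(ctlr->dma_rx),
                                    "no rx-dma configuration found\n");
                ctlr->dma_rx = NULL;
                goto err;
        }

        /* DMA is usable: let the core route eligible transfers to it */
        ctlr->can_dma = my_spi_can_dma;
        return 0;

err:
        my_dma_release(ctlr);
        return ret;
}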
993 static int bcm2835_spi_transfer_one_poll(struct spi_controller *ctlr,
998 struct bcm2835_spi *bs = spi_controller_get_devdata(ctlr);
1037 return bcm2835_spi_transfer_one_irq(ctlr, spi,
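The poll path (ending at line 1037) spins only as long as the transfer should plausibly take and defers to the interrupt path if it overruns, rather than burning the CPU. A rough sketch of that fallback structure; the timeout guard and my_spi_fifo_busy() are hypothetical stand-ins for the driver's register-level loop:

#include <linux/jiffies.h>
#include <linux/spi/spi.h>

static bool my_spi_fifo_busy(struct spi_controller *ctlr);

static int my_spi_transfer_one_poll(struct spi_controller *ctlr,
                                    struct spi_device *spi,
                                    struct spi_transfer *tfr)
{
        unsigned long timeout = jiffies + 2;    /* coarse guard */

        while (my_spi_fifo_busy(ctlr)) {
                if (time_after(jiffies, timeout))
                        /* slower than expected: finish under interrupts */
                        return my_spi_transfer_one_irq(ctlr, spi, tfr);
                cpu_relax();
        }
        return 0;       /* zero: finished synchronously */
}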
1048 static int bcm2835_spi_transfer_one(struct spi_controller *ctlr,
1052 struct bcm2835_spi *bs = spi_controller_get_devdata(ctlr);
1097 return bcm2835_spi_transfer_one_poll(ctlr, spi, tfr, cs);
1103 if (ctlr->can_dma && bcm2835_spi_can_dma(ctlr, spi, tfr))
1104 return bcm2835_spi_transfer_one_dma(ctlr, tfr, target, cs);
1107 return bcm2835_spi_transfer_one_irq(ctlr, spi, tfr, cs, true);
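Lines 1048–1107 are the dispatch: poll for transfers shorter than the driver's polling limit (30 µs in current sources, if I read the driver correctly), DMA when the core and the driver both agree it is worthwhile, interrupts otherwise. A sketch; my_spi_xfer_time_us() is a hypothetical time-on-wire estimate from tfr->len and tfr->speed_hz:

#include <linux/spi/spi.h>

static unsigned int my_spi_xfer_time_us(struct spi_transfer *tfr);
static bool my_spi_can_dma(struct spi_controller *ctlr,
                           struct spi_device *spi, struct spi_transfer *tfr);

static int my_spi_transfer_one(struct spi_controller *ctlr,
                               struct spi_device *spi,
                               struct spi_transfer *tfr)
{
        if (my_spi_xfer_time_us(tfr) < 30)
                return my_spi_transfer_one_poll(ctlr, spi, tfr);

        if (ctlr->can_dma && my_spi_can_dma(ctlr, spi, tfr))
                return my_spi_transfer_one_dma(ctlr, tfr);

        return my_spi_transfer_one_irq(ctlr, spi, tfr);
}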
1110 static int bcm2835_spi_prepare_message(struct spi_controller *ctlr,
1114 struct bcm2835_spi *bs = spi_controller_get_devdata(ctlr);
1118 if (ctlr->can_dma) {
1124 ret = spi_split_transfers_maxsize(ctlr, msg, 65532,
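Why 65532: the controller's DLEN register is 16 bits, capping a single DMA transfer at 65535 bytes, and rounding down to a multiple of 4 keeps every chunk word-aligned. A sketch of the prepare_message hook, assuming the spi_split_transfers_maxsize() variant that still takes a gfp_t (as the trailing comma at line 1124 suggests):

#include <linux/spi/spi.h>

static int my_spi_prepare_message(struct spi_controller *ctlr,
                                  struct spi_message *msg)
{
        int ret;

        if (ctlr->can_dma) {
                /* split oversized transfers before they hit transfer_one */
                ret = spi_split_transfers_maxsize(ctlr, msg, 65532,
                                                  GFP_KERNEL);
                if (ret)
                        return ret;
        }

        /* ... program mode bits for msg->spi ... */
        return 0;
}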
1139 static void bcm2835_spi_handle_err(struct spi_controller *ctlr,
1142 struct bcm2835_spi *bs = spi_controller_get_devdata(ctlr);
1145 if (ctlr->dma_tx) {
1146 dmaengine_terminate_sync(ctlr->dma_tx);
1149 if (ctlr->dma_rx) {
1150 dmaengine_terminate_sync(ctlr->dma_rx);
1167 struct spi_controller *ctlr = spi->controller;
1173 dma_unmap_single(ctlr->dma_rx->device->dev,
1181 static int bcm2835_spi_setup_dma(struct spi_controller *ctlr,
1188 if (!ctlr->dma_rx)
1191 target->clear_rx_addr = dma_map_single(ctlr->dma_rx->device->dev,
1195 if (dma_mapping_error(ctlr->dma_rx->device->dev, target->clear_rx_addr)) {
1201 target->clear_rx_desc = dmaengine_prep_dma_cyclic(ctlr->dma_rx,
1221 struct spi_controller *ctlr = spi->controller;
1222 struct bcm2835_spi *bs = spi_controller_get_devdata(ctlr);
1236 ret = bcm2835_spi_setup_dma(ctlr, spi, bs, target);
1258 if (ctlr->dma_rx) {
1262 dma_sync_single_for_device(ctlr->dma_rx->device->dev,
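The setup/cleanup pair (lines 1167–1262) maps a small per-chip-select scratch buffer once at setup time, against the device that actually performs the DMA (ctlr->dma_rx->device->dev, not the SPI controller's own device), and unmaps it in cleanup. A sketch of that pattern; the DMA_TO_DEVICE direction is an assumption based on the buffer being a source the engine writes out to a peripheral register:

#include <linux/dma-mapping.h>
#include <linux/spi/spi.h>

static int my_spi_map_clear_rx(struct spi_controller *ctlr,
                               void *buf, size_t len, dma_addr_t *addr)
{
        struct device *dma_dev;

        if (!ctlr->dma_rx)
                return 0;       /* interrupt-only operation, nothing to map */

        dma_dev = ctlr->dma_rx->device->dev;
        *addr = dma_map_single(dma_dev, buf, len, DMA_TO_DEVICE);
        if (dma_mapping_error(dma_dev, *addr))
                return -ENOMEM;

        return 0;
}

static void my_spi_unmap_clear_rx(struct spi_controller *ctlr,
                                  dma_addr_t addr, size_t len)
{
        dma_unmap_single(ctlr->dma_rx->device->dev, addr, len,
                         DMA_TO_DEVICE);
}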
1327 struct spi_controller *ctlr;
1331 ctlr = devm_spi_alloc_host(&pdev->dev, sizeof(*bs));
1332 if (!ctlr)
1335 platform_set_drvdata(pdev, ctlr);
1337 ctlr->use_gpio_descriptors = true;
1338 ctlr->mode_bits = BCM2835_SPI_MODE_BITS;
1339 ctlr->bits_per_word_mask = SPI_BPW_MASK(8);
1340 ctlr->num_chipselect = 3;
1341 ctlr->setup = bcm2835_spi_setup;
1342 ctlr->cleanup = bcm2835_spi_cleanup;
1343 ctlr->transfer_one = bcm2835_spi_transfer_one;
1344 ctlr->handle_err = bcm2835_spi_handle_err;
1345 ctlr->prepare_message = bcm2835_spi_prepare_message;
1346 ctlr->dev.of_node = pdev->dev.of_node;
1348 bs = spi_controller_get_devdata(ctlr);
1349 bs->ctlr = ctlr;
1360 ctlr->max_speed_hz = clk_get_rate(bs->clk) / 2;
1371 err = bcm2835_dma_init(ctlr, &pdev->dev, bs);
1386 err = spi_register_controller(ctlr);
1398 bcm2835_dma_release(ctlr, bs);
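Probe (lines 1327–1398) follows the standard shape: allocate the controller with a managed helper (so an early failure needs no explicit free), wire up the ops, then register, unwinding DMA on registration failure. A skeleton with clock, IRQ, and register setup elided and hook names taken from the sketches above:

#include <linux/platform_device.h>
#include <linux/spi/spi.h>

static int my_spi_probe(struct platform_device *pdev)
{
        struct spi_controller *ctlr;
        struct my_spi *bs;
        int err;

        ctlr = devm_spi_alloc_host(&pdev->dev, sizeof(*bs));
        if (!ctlr)
                return -ENOMEM;

        platform_set_drvdata(pdev, ctlr);

        ctlr->use_gpio_descriptors = true;
        ctlr->bits_per_word_mask = SPI_BPW_MASK(8);
        ctlr->num_chipselect = 3;
        ctlr->transfer_one = my_spi_transfer_one;
        ctlr->prepare_message = my_spi_prepare_message;
        ctlr->dev.of_node = pdev->dev.of_node;

        bs = spi_controller_get_devdata(ctlr);
        bs->ctlr = ctlr;        /* the reverse lookup from the top of the list */

        err = my_dma_init(ctlr, &pdev->dev);
        if (err)
                return err;

        err = spi_register_controller(ctlr);
        if (err)
                goto out_dma_release;

        return 0;

out_dma_release:
        my_dma_release(ctlr);
        return err;
}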
1406 struct spi_controller *ctlr = platform_get_drvdata(pdev);
1407 struct bcm2835_spi *bs = spi_controller_get_devdata(ctlr);
1411 spi_unregister_controller(ctlr);
1413 bcm2835_dma_release(ctlr, bs);
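Remove (lines 1406–1413) runs the same steps in reverse: unregister first, so the message queue is quiesced before the DMA channels disappear. A sketch, assuming the void-returning platform remove callback of recent kernels:

#include <linux/platform_device.h>
#include <linux/spi/spi.h>

static void my_spi_remove(struct platform_device *pdev)
{
        struct spi_controller *ctlr = platform_get_drvdata(pdev);

        spi_unregister_controller(ctlr);        /* quiesce the queue first */
        my_dma_release(ctlr);
}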