Lines Matching refs:xfer
164 struct spi_transfer *xfer)
172 if (xfer->rx_buf)
174 if (xfer->tx_buf)
178 cnt = xfer->len / 4;
179 if (xfer->tx_buf)
181 xfer->tx_buf, cnt);
183 remainder = xfer->len % 4;
184 if (xfer->tx_buf && remainder > 0) {
186 memcpy(&reg_val, xfer->tx_buf + cnt * 4, remainder);
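
The matches at lines 164-186 outline the FIFO transmit path: the aligned part of xfer->tx_buf is pushed 4 bytes at a time (cnt = xfer->len / 4), and the 1-3 trailing bytes are packed into a temporary word before being written out. A minimal sketch of that word-plus-remainder pattern follows; SPIS_TX_DATA_REG and its offset are assumptions made for illustration and do not appear in the listing.

	#include <linux/io.h>
	#include <linux/spi/spi.h>
	#include <linux/string.h>
	#include <linux/types.h>

	#define SPIS_TX_DATA_REG	0x0	/* offset assumed for illustration only */

	static void mtk_spis_fifo_tx_sketch(void __iomem *base,
					    struct spi_transfer *xfer)
	{
		u32 reg_val = 0;
		int cnt = xfer->len / 4;	/* number of whole 32-bit words */
		int remainder = xfer->len % 4;	/* trailing 1..3 bytes */

		/* push the aligned part of the buffer word by word */
		if (xfer->tx_buf)
			iowrite32_rep(base + SPIS_TX_DATA_REG, xfer->tx_buf, cnt);

		/* pack the tail into one partially filled word */
		if (xfer->tx_buf && remainder > 0) {
			memcpy(&reg_val, xfer->tx_buf + cnt * 4, remainder);
			writel(reg_val, base + SPIS_TX_DATA_REG);
		}
	}
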
201 struct spi_transfer *xfer)
209 if (xfer->tx_buf) {
213 void *nonconst_tx = (void *)xfer->tx_buf;
215 xfer->tx_dma = dma_map_single(dev, nonconst_tx,
216 xfer->len, DMA_TO_DEVICE);
217 if (dma_mapping_error(dev, xfer->tx_dma)) {
223 if (xfer->rx_buf) {
224 xfer->rx_dma = dma_map_single(dev, xfer->rx_buf,
225 xfer->len, DMA_FROM_DEVICE);
226 if (dma_mapping_error(dev, xfer->rx_dma)) {
232 writel(xfer->tx_dma, mdata->base + SPIS_TX_SRC_REG);
233 writel(xfer->rx_dma, mdata->base + SPIS_RX_DST_REG);
239 if (xfer->tx_buf)
241 if (xfer->rx_buf)
247 reg_val |= (xfer->len - 1) & TX_DMA_LEN;
251 if (xfer->tx_buf)
253 if (xfer->rx_buf)
265 if (xfer->rx_buf)
266 dma_unmap_single(dev, xfer->rx_dma,
267 xfer->len, DMA_FROM_DEVICE);
270 if (xfer->tx_buf)
271 dma_unmap_single(dev, xfer->tx_dma,
272 xfer->len, DMA_TO_DEVICE);
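
Lines 201-272 trace the DMA path: each buffer is mapped with dma_map_single(), the mapping is checked with dma_mapping_error(), the resulting bus addresses are written to SPIS_TX_SRC_REG / SPIS_RX_DST_REG, and the buffers are unmapped again on completion or error. The listing also shows the transfer length being programmed as (xfer->len - 1) masked with TX_DMA_LEN (line 247), which the sketch below leaves out. The register offsets, error codes, and label names here are assumptions; only the overall map/check/program/unmap shape comes from the listing.

	#include <linux/device.h>
	#include <linux/dma-mapping.h>
	#include <linux/errno.h>
	#include <linux/io.h>
	#include <linux/spi/spi.h>

	#define SPIS_TX_SRC_REG		0x10	/* offsets assumed for illustration only */
	#define SPIS_RX_DST_REG		0x14

	static int mtk_spis_dma_map_sketch(struct device *dev, void __iomem *base,
					   struct spi_transfer *xfer)
	{
		if (xfer->tx_buf) {
			/* tx_buf is const; dma_map_single() takes a non-const pointer */
			void *nonconst_tx = (void *)xfer->tx_buf;

			xfer->tx_dma = dma_map_single(dev, nonconst_tx,
						      xfer->len, DMA_TO_DEVICE);
			if (dma_mapping_error(dev, xfer->tx_dma))
				return -ENOMEM;
		}

		if (xfer->rx_buf) {
			xfer->rx_dma = dma_map_single(dev, xfer->rx_buf,
						      xfer->len, DMA_FROM_DEVICE);
			if (dma_mapping_error(dev, xfer->rx_dma))
				goto unmap_tx;
		}

		/* hand the bus addresses to the controller */
		writel(xfer->tx_dma, base + SPIS_TX_SRC_REG);
		writel(xfer->rx_dma, base + SPIS_RX_DST_REG);
		return 0;

	unmap_tx:
		if (xfer->tx_buf)
			dma_unmap_single(dev, xfer->tx_dma, xfer->len, DMA_TO_DEVICE);
		return -ENOMEM;
	}

Unmapping on the error path mirrors the cleanup shown at lines 265-272, where the RX buffer is unmapped before the TX buffer once the transfer is finished.
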
284 struct spi_transfer *xfer)
290 mdata->cur_transfer = xfer;
292 if (xfer->len > mdata->dev_comp->max_fifo_size)
293 return mtk_spi_slave_dma_transfer(ctlr, spi, xfer);
295 return mtk_spi_slave_fifo_transfer(ctlr, spi, xfer);
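
The final matches (lines 284-295) show the transfer_one dispatch: the in-flight transfer is cached in the driver data and routed to the DMA path when it is larger than the controller FIFO, otherwise to the FIFO path. A hedged reconstruction, using reduced stand-in structures for the driver data (only cur_transfer and dev_comp->max_fifo_size appear in the listing):

	#include <linux/spi/spi.h>
	#include <linux/types.h>

	/* reduced stand-ins for the driver's private data, for this sketch only */
	struct mtk_spi_compatible_sketch {
		u32 max_fifo_size;
	};

	struct mtk_spi_slave_sketch {
		struct spi_transfer *cur_transfer;
		const struct mtk_spi_compatible_sketch *dev_comp;
	};

	/* the DMA and FIFO paths named in the listing, declared here for the sketch */
	int mtk_spi_slave_dma_transfer(struct spi_controller *ctlr,
				       struct spi_device *spi,
				       struct spi_transfer *xfer);
	int mtk_spi_slave_fifo_transfer(struct spi_controller *ctlr,
					struct spi_device *spi,
					struct spi_transfer *xfer);

	static int mtk_spi_slave_transfer_one_sketch(struct spi_controller *ctlr,
						     struct spi_device *spi,
						     struct spi_transfer *xfer)
	{
		struct mtk_spi_slave_sketch *mdata = spi_controller_get_devdata(ctlr);

		/* remember the in-flight transfer for later completion handling */
		mdata->cur_transfer = xfer;

		/* anything larger than the FIFO goes through the DMA path */
		if (xfer->len > mdata->dev_comp->max_fifo_size)
			return mtk_spi_slave_dma_transfer(ctlr, spi, xfer);

		return mtk_spi_slave_fifo_transfer(ctlr, spi, xfer);
	}

Splitting on max_fifo_size keeps short transfers on the cheap PIO/FIFO path while avoiding FIFO overruns on long ones; the threshold is a property of the controller variant described by dev_comp.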