Lines Matching defs:aus (the struct at91_usart_spi driver-state pointer in the AT91 USART SPI controller driver)

107 struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
109 at91_usart_spi_writel(aus, IER, US_IR_RXRDY);
110 aus->current_rx_remaining_bytes = 0;
111 complete(&aus->xfer_completion);
118 struct at91_usart_spi *aus = spi_master_get_devdata(ctrl);
120 return aus->use_dma && xfer->len >= US_DMA_MIN_BYTES;
124 struct at91_usart_spi *aus)
127 struct device *dev = &aus->mpdev->dev;
128 phys_addr_t phybase = aus->phybase;
185 aus->use_dma = true;
219 struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
227 at91_usart_spi_writel(aus, IDR, US_IR_RXRDY);
265 at91_usart_spi_writel(aus, IER, US_IR_RXRDY);
271 static unsigned long at91_usart_spi_dma_timeout(struct at91_usart_spi *aus)
273 return wait_for_completion_timeout(&aus->xfer_completion,
277 static inline u32 at91_usart_spi_tx_ready(struct at91_usart_spi *aus)
279 return aus->status & US_IR_TXRDY;
282 static inline u32 at91_usart_spi_rx_ready(struct at91_usart_spi *aus)
284 return aus->status & US_IR_RXRDY;
287 static inline u32 at91_usart_spi_check_overrun(struct at91_usart_spi *aus)
289 return aus->status & US_IR_OVRE;
292 static inline u32 at91_usart_spi_read_status(struct at91_usart_spi *aus)
294 aus->status = at91_usart_spi_readl(aus, CSR);
295 return aus->status;
298 static inline void at91_usart_spi_tx(struct at91_usart_spi *aus)
300 unsigned int len = aus->current_transfer->len;
301 unsigned int remaining = aus->current_tx_remaining_bytes;
302 const u8 *tx_buf = aus->current_transfer->tx_buf;
307 if (at91_usart_spi_tx_ready(aus)) {
308 at91_usart_spi_writeb(aus, THR, tx_buf[len - remaining]);
309 aus->current_tx_remaining_bytes--;
313 static inline void at91_usart_spi_rx(struct at91_usart_spi *aus)
315 int len = aus->current_transfer->len;
316 int remaining = aus->current_rx_remaining_bytes;
317 u8 *rx_buf = aus->current_transfer->rx_buf;
322 rx_buf[len - remaining] = at91_usart_spi_readb(aus, RHR);
323 aus->current_rx_remaining_bytes--;
327 at91_usart_spi_set_xfer_speed(struct at91_usart_spi *aus,
330 at91_usart_spi_writel(aus, BRGR,
331 DIV_ROUND_UP(aus->spi_clk, xfer->speed_hz));
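The BRGR write above programs the baud-rate generator straight from the ratio of the peripheral clock to the requested SCK rate. As a hypothetical worked example (both clock values are assumptions, not taken from the listing): with aus->spi_clk = 132 MHz and xfer->speed_hz = 10 MHz, DIV_ROUND_UP(132000000, 10000000) = 14, so SCK runs at 132 MHz / 14 ≈ 9.43 MHz. Rounding the divider up means the bus never clocks faster than the rate the transfer asked for.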
337 struct at91_usart_spi *aus = spi_master_get_devdata(controller);
339 spin_lock(&aus->lock);
340 at91_usart_spi_read_status(aus);
342 if (at91_usart_spi_check_overrun(aus)) {
343 aus->xfer_failed = true;
344 at91_usart_spi_writel(aus, IDR, US_IR_OVRE | US_IR_RXRDY);
345 spin_unlock(&aus->lock);
349 if (at91_usart_spi_rx_ready(aus)) {
350 at91_usart_spi_rx(aus);
351 spin_unlock(&aus->lock);
355 spin_unlock(&aus->lock);
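The matched lines at 337-355 are fragments of the controller's interrupt handler. Below is a hedged reconstruction of how they likely fit together; the function name, signature and the IRQ_HANDLED/IRQ_NONE return paths are assumptions filled in around the lines actually shown in the listing.

        static irqreturn_t at91_usart_spi_interrupt(int irq, void *dev_id)
        {
                struct spi_controller *controller = dev_id;
                struct at91_usart_spi *aus = spi_master_get_devdata(controller);

                spin_lock(&aus->lock);
                at91_usart_spi_read_status(aus);

                /* Overrun: mark the transfer failed and mask further IRQs. */
                if (at91_usart_spi_check_overrun(aus)) {
                        aus->xfer_failed = true;
                        at91_usart_spi_writel(aus, IDR, US_IR_OVRE | US_IR_RXRDY);
                        spin_unlock(&aus->lock);
                        return IRQ_HANDLED;
                }

                /* Otherwise drain one byte from RHR if the receiver is ready. */
                if (at91_usart_spi_rx_ready(aus)) {
                        at91_usart_spi_rx(aus);
                        spin_unlock(&aus->lock);
                        return IRQ_HANDLED;
                }

                spin_unlock(&aus->lock);
                return IRQ_NONE;
        }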
362 struct at91_usart_spi *aus = spi_master_get_devdata(spi->controller);
364 unsigned int mr = at91_usart_spi_readl(aus, MR);
402 struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
406 at91_usart_spi_set_xfer_speed(aus, xfer);
407 aus->xfer_failed = false;
408 aus->current_transfer = xfer;
409 aus->current_tx_remaining_bytes = xfer->len;
410 aus->current_rx_remaining_bytes = xfer->len;
412 while ((aus->current_tx_remaining_bytes ||
413 aus->current_rx_remaining_bytes) && !aus->xfer_failed) {
414 reinit_completion(&aus->xfer_completion);
421 dma_timeout = at91_usart_spi_dma_timeout(aus);
427 aus->current_tx_remaining_bytes = 0;
429 at91_usart_spi_read_status(aus);
430 at91_usart_spi_tx(aus);
436 if (aus->xfer_failed) {
437 dev_err(aus->dev, "Overrun!\n");
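Lines 402-437 belong to the transfer_one path. The sketch below is a hedged reconstruction of how those fragments likely combine into a PIO loop with a DMA fast path; the DMA branch, the timeout/WARN_ON handling and the return codes are assumptions wrapped around the lines actually present in the listing.

        static int at91_usart_spi_transfer_one(struct spi_controller *ctlr,
                                               struct spi_device *spi,
                                               struct spi_transfer *xfer)
        {
                struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
                unsigned long dma_timeout = 0;
                int ret = 0;

                at91_usart_spi_set_xfer_speed(aus, xfer);
                aus->xfer_failed = false;
                aus->current_transfer = xfer;
                aus->current_tx_remaining_bytes = xfer->len;
                aus->current_rx_remaining_bytes = xfer->len;

                while ((aus->current_tx_remaining_bytes ||
                        aus->current_rx_remaining_bytes) && !aus->xfer_failed) {
                        reinit_completion(&aus->xfer_completion);

                        if (at91_usart_spi_can_dma(ctlr, spi, xfer) && !ret) {
                                /* Queue the DMA descriptors and wait for the RX
                                 * callback to complete xfer_completion. */
                                ret = at91_usart_spi_dma_transfer(ctlr, xfer);
                                if (ret)
                                        continue;

                                dma_timeout = at91_usart_spi_dma_timeout(aus);
                                if (WARN_ON(dma_timeout == 0)) {
                                        dev_err(&spi->dev, "DMA transfer timeout\n");
                                        return -EIO;
                                }
                                aus->current_tx_remaining_bytes = 0;
                        } else {
                                /* PIO: push one byte to THR; RX bytes arrive via
                                 * the RXRDY interrupt handler shown earlier. */
                                at91_usart_spi_read_status(aus);
                                at91_usart_spi_tx(aus);
                        }
                }

                if (aus->xfer_failed) {
                        dev_err(aus->dev, "Overrun!\n");
                        return -EIO;
                }

                return 0;
        }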
447 struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
451 at91_usart_spi_writel(aus, CR, US_ENABLE);
452 at91_usart_spi_writel(aus, IER, US_OVRE_RXRDY_IRQS);
453 at91_usart_spi_writel(aus, MR, *ausd);
461 struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
463 at91_usart_spi_writel(aus, CR, US_RESET | US_DISABLE);
464 at91_usart_spi_writel(aus, IDR, US_OVRE_RXRDY_IRQS);
477 static void at91_usart_spi_init(struct at91_usart_spi *aus)
479 at91_usart_spi_writel(aus, MR, US_INIT);
480 at91_usart_spi_writel(aus, CR, US_RESET | US_DISABLE);
516 struct at91_usart_spi *aus;
535 controller = spi_alloc_master(&pdev->dev, sizeof(*aus));
559 aus = spi_master_get_devdata(controller);
561 aus->dev = &pdev->dev;
562 aus->regs = devm_ioremap_resource(&pdev->dev, regs);
563 if (IS_ERR(aus->regs)) {
564 ret = PTR_ERR(aus->regs);
568 aus->irq = irq;
569 aus->clk = clk;
580 aus->spi_clk = clk_get_rate(clk);
581 at91_usart_spi_init(aus);
583 aus->phybase = regs->start;
585 aus->mpdev = to_platform_device(pdev->dev.parent);
587 ret = at91_usart_spi_configure_dma(controller, aus);
591 spin_lock_init(&aus->lock);
592 init_completion(&aus->xfer_completion);
600 at91_usart_spi_readl(aus, VERSION),
617 struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
619 clk_disable_unprepare(aus->clk);
628 struct at91_usart_spi *aus = spi_master_get_devdata(ctrl);
632 return clk_prepare_enable(aus->clk);
653 struct at91_usart_spi *aus = spi_master_get_devdata(ctrl);
662 at91_usart_spi_init(aus);
670 struct at91_usart_spi *aus = spi_master_get_devdata(ctlr);
673 clk_disable_unprepare(aus->clk);
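Taken together, the field accesses in this listing imply roughly the following layout for the driver-state structure. This is a sketch assembled only from the members referenced above, with types inferred from how each field is used; it is not a verbatim copy of the driver's definition.

        struct at91_usart_spi {
                struct platform_device  *mpdev;           /* parent MFD platform device, used for DMA setup */
                struct spi_transfer     *current_transfer;
                void __iomem            *regs;            /* ioremapped USART register block */
                struct device           *dev;
                struct clk              *clk;

                struct completion       xfer_completion;  /* completed by the DMA RX callback */
                spinlock_t              lock;             /* protects status/RX handling in the IRQ */

                phys_addr_t             phybase;          /* physical base, used in DMA slave config */
                int                     irq;

                unsigned int            current_tx_remaining_bytes;
                unsigned int            current_rx_remaining_bytes;

                u32                     spi_clk;          /* peripheral clock rate, feeds BRGR */
                u32                     status;           /* last CSR snapshot */

                bool                    xfer_failed;      /* set on overrun by the IRQ handler */
                bool                    use_dma;
        };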