Lines Matching defs:idev: occurrences of idev (struct axxia_i2c_dev, the per-controller state of the Axxia I2C bus driver, drivers/i2c/busses/i2c-axxia.c). The number prefixed to each line is its line number in that file.

152 static void i2c_int_disable(struct axxia_i2c_dev *idev, u32 mask)
156 int_en = readl(idev->base + MST_INT_ENABLE);
157 writel(int_en & ~mask, idev->base + MST_INT_ENABLE);
160 static void i2c_int_enable(struct axxia_i2c_dev *idev, u32 mask)
164 int_en = readl(idev->base + MST_INT_ENABLE);
165 writel(int_en | mask, idev->base + MST_INT_ENABLE);
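The two helpers at lines 152-165 are plain read-modify-write updates of the master interrupt mask. Reassembled from the lines shown (only the local variable declaration is filled in by assumption):

    static void i2c_int_disable(struct axxia_i2c_dev *idev, u32 mask)
    {
            u32 int_en;

            /* Clear the requested bits in the master interrupt enable mask. */
            int_en = readl(idev->base + MST_INT_ENABLE);
            writel(int_en & ~mask, idev->base + MST_INT_ENABLE);
    }

    static void i2c_int_enable(struct axxia_i2c_dev *idev, u32 mask)
    {
            u32 int_en;

            /* Set the requested bits in the master interrupt enable mask. */
            int_en = readl(idev->base + MST_INT_ENABLE);
            writel(int_en | mask, idev->base + MST_INT_ENABLE);
    }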
176 static int axxia_i2c_init(struct axxia_i2c_dev *idev)
178 u32 divisor = clk_get_rate(idev->i2c_clk) / idev->bus_clk_rate;
179 u32 clk_mhz = clk_get_rate(idev->i2c_clk) / 1000000;
186 dev_dbg(idev->dev, "rate=%uHz per_clk=%uMHz -> ratio=1:%u\n",
187 idev->bus_clk_rate, clk_mhz, divisor);
190 writel(0x01, idev->base + SOFT_RESET);
192 while (readl(idev->base + SOFT_RESET) & 1) {
194 dev_warn(idev->dev, "Soft reset failed\n");
200 writel(0x1, idev->base + GLOBAL_CONTROL);
202 if (idev->bus_clk_rate <= I2C_MAX_STANDARD_MODE_FREQ) {
215 writel(t_high, idev->base + SCL_HIGH_PERIOD);
217 writel(t_low, idev->base + SCL_LOW_PERIOD);
219 writel(t_setup, idev->base + SDA_SETUP_TIME);
221 writel(ns_to_clk(300, clk_mhz), idev->base + SDA_HOLD_TIME);
223 writel(ns_to_clk(50, clk_mhz), idev->base + SPIKE_FLTR_LEN);
238 writel(prescale, idev->base + TIMER_CLOCK_DIV);
240 writel(WT_EN | WT_VALUE(tmo_clk), idev->base + WAIT_TIMER_CONTROL);
243 i2c_int_disable(idev, ~0);
246 writel(0x01, idev->base + INTERRUPT_ENABLE);
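axxia_i2c_init() derives its SCL timing from the peripheral clock: lines 221 and 223 feed nanosecond datasheet values through a ns_to_clk() conversion. A minimal sketch of that helper, reconstructed on the assumption that it simply scales nanoseconds by the clock rate in MHz:

    #include <linux/math64.h>

    /* Convert a duration in ns into peripheral-clock ticks; clk_mhz is the
     * value computed at line 179. Rounding behaviour is an assumption. */
    static u32 ns_to_clk(u64 ns, u32 clk_mhz)
    {
            return div_u64(ns * clk_mhz, 1000);
    }

With divisor = per_clk / bus_clk (line 178), the SCL high and low periods written at lines 215 and 217 presumably split that divisor between the two clock phases (an even split in standard mode per the branch at line 202, a longer low phase in fast mode).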
270 static int axxia_i2c_empty_rx_fifo(struct axxia_i2c_dev *idev)
272 struct i2c_msg *msg = idev->msg_r;
273 size_t rx_fifo_avail = readl(idev->base + MST_RX_FIFO);
274 int bytes_to_transfer = min(rx_fifo_avail, msg->len - idev->msg_xfrd_r);
277 int c = readl(idev->base + MST_DATA);
279 if (idev->msg_xfrd_r == 0 && i2c_m_recv_len(msg)) {
284 idev->msg_err = -EPROTO;
285 i2c_int_disable(idev, ~MST_STATUS_TSS);
286 complete(&idev->msg_complete);
290 writel(msg->len, idev->base + MST_RX_XFER);
292 msg->buf[idev->msg_xfrd_r++] = c;
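Reassembled, the RX drain loop reads MST_RX_FIFO for the available byte count and copies data out of MST_DATA, with a special case for the first byte of an SMBus block read (I2C_M_RECV_LEN), where the received length byte resizes the transfer. The length-check bounds and the return value are assumptions; the rest follows the lines shown:

    static int axxia_i2c_empty_rx_fifo(struct axxia_i2c_dev *idev)
    {
            struct i2c_msg *msg = idev->msg_r;
            size_t rx_fifo_avail = readl(idev->base + MST_RX_FIFO);
            int bytes_to_transfer = min(rx_fifo_avail, msg->len - idev->msg_xfrd_r);

            while (bytes_to_transfer-- > 0) {
                    int c = readl(idev->base + MST_DATA);

                    if (idev->msg_xfrd_r == 0 && i2c_m_recv_len(msg)) {
                            /* First byte of an SMBus block read carries the
                             * length; reject out-of-range values, otherwise
                             * grow the expected transfer accordingly. */
                            if (c <= 0 || c > I2C_SMBUS_BLOCK_MAX) {
                                    idev->msg_err = -EPROTO;
                                    i2c_int_disable(idev, ~MST_STATUS_TSS);
                                    complete(&idev->msg_complete);
                                    break;
                            }
                            msg->len = 1 + c;
                            writel(msg->len, idev->base + MST_RX_XFER);
                    }
                    msg->buf[idev->msg_xfrd_r++] = c;
            }

            return 0;
    }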
302 static int axxia_i2c_fill_tx_fifo(struct axxia_i2c_dev *idev)
304 struct i2c_msg *msg = idev->msg;
305 size_t tx_fifo_avail = FIFO_SIZE - readl(idev->base + MST_TX_FIFO);
306 int bytes_to_transfer = min(tx_fifo_avail, msg->len - idev->msg_xfrd);
307 int ret = msg->len - idev->msg_xfrd - bytes_to_transfer;
310 writel(msg->buf[idev->msg_xfrd++], idev->base + MST_DATA);
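The TX counterpart measures free FIFO space against what is left of the message and returns the number of bytes still outstanding after this fill; only the write loop and the final return are filled in here:

    static int axxia_i2c_fill_tx_fifo(struct axxia_i2c_dev *idev)
    {
            struct i2c_msg *msg = idev->msg;
            size_t tx_fifo_avail = FIFO_SIZE - readl(idev->base + MST_TX_FIFO);
            int bytes_to_transfer = min(tx_fifo_avail, msg->len - idev->msg_xfrd);
            int ret = msg->len - idev->msg_xfrd - bytes_to_transfer;

            while (bytes_to_transfer-- > 0)
                    writel(msg->buf[idev->msg_xfrd++], idev->base + MST_DATA);

            /* 0 means the rest of the message fits in the FIFO; the ISR at
             * line 389 uses that to drop the TX-FIFO-low interrupt. */
            return ret;
    }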
315 static void axxia_i2c_slv_fifo_event(struct axxia_i2c_dev *idev)
317 u32 fifo_status = readl(idev->base + SLV_RX_FIFO);
320 dev_dbg(idev->dev, "slave irq fifo_status=0x%x\n", fifo_status);
324 i2c_slave_event(idev->slave,
327 val = readl(idev->base + SLV_DATA);
328 i2c_slave_event(idev->slave, I2C_SLAVE_WRITE_RECEIVED, &val);
331 readl(idev->base + SLV_DATA); /* dummy read */
332 i2c_slave_event(idev->slave, I2C_SLAVE_STOP, &val);
335 readl(idev->base + SLV_DATA); /* dummy read */
338 static irqreturn_t axxia_i2c_slv_isr(struct axxia_i2c_dev *idev)
340 u32 status = readl(idev->base + SLV_INT_STATUS);
343 dev_dbg(idev->dev, "slave irq status=0x%x\n", status);
346 axxia_i2c_slv_fifo_event(idev);
348 i2c_slave_event(idev->slave, I2C_SLAVE_READ_REQUESTED, &val);
349 writel(val, idev->base + SLV_DATA);
352 i2c_slave_event(idev->slave, I2C_SLAVE_READ_PROCESSED, &val);
353 writel(val, idev->base + SLV_DATA);
356 i2c_slave_event(idev->slave, I2C_SLAVE_STOP, &val);
358 writel(INT_SLV, idev->base + INTERRUPT_STATUS);
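Putting lines 338-358 together, the slave-mode interrupt handler dispatches on SLV_INT_STATUS: drain the slave RX FIFO, feed read requests and continuations back through SLV_DATA, signal STOP, then ack the slave interrupt. The status bit names below (SLV_STATUS_*) are assumptions; only the register accesses and i2c_slave_event() calls come from the listing:

    static irqreturn_t axxia_i2c_slv_isr(struct axxia_i2c_dev *idev)
    {
            u32 status = readl(idev->base + SLV_INT_STATUS);
            u8 val;

            dev_dbg(idev->dev, "slave irq status=0x%x\n", status);

            if (status & SLV_STATUS_RFH)            /* RX FIFO has data */
                    axxia_i2c_slv_fifo_event(idev);
            if (status & SLV_STATUS_SRS1) {         /* master started a read */
                    i2c_slave_event(idev->slave, I2C_SLAVE_READ_REQUESTED, &val);
                    writel(val, idev->base + SLV_DATA);
            }
            if (status & SLV_STATUS_SRND1) {        /* master wants the next byte */
                    i2c_slave_event(idev->slave, I2C_SLAVE_READ_PROCESSED, &val);
                    writel(val, idev->base + SLV_DATA);
            }
            if (status & SLV_STATUS_SRC1)           /* transaction complete */
                    i2c_slave_event(idev->slave, I2C_SLAVE_STOP, &val);

            writel(INT_SLV, idev->base + INTERRUPT_STATUS);
            return IRQ_HANDLED;
    }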
364 struct axxia_i2c_dev *idev = _dev;
368 status = readl(idev->base + INTERRUPT_STATUS);
371 ret = axxia_i2c_slv_isr(idev);
376 status = readl(idev->base + MST_INT_STATUS);
378 if (!idev->msg) {
379 dev_warn(idev->dev, "unexpected interrupt\n");
384 if (i2c_m_rd(idev->msg_r) && (status & MST_STATUS_RFL))
385 axxia_i2c_empty_rx_fifo(idev);
388 if (!i2c_m_rd(idev->msg) && (status & MST_STATUS_TFL)) {
389 if (axxia_i2c_fill_tx_fifo(idev) == 0)
390 i2c_int_disable(idev, MST_STATUS_TFL);
395 i2c_int_disable(idev, ~0);
397 idev->msg_err = -EAGAIN;
399 idev->msg_err = -ENXIO;
401 idev->msg_err = -EIO;
402 dev_dbg(idev->dev, "error %#x, addr=%#x rx=%u/%u tx=%u/%u\n",
404 idev->msg->addr,
405 readl(idev->base + MST_RX_BYTES_XFRD),
406 readl(idev->base + MST_RX_XFER),
407 readl(idev->base + MST_TX_BYTES_XFRD),
408 readl(idev->base + MST_TX_XFER));
409 complete(&idev->msg_complete);
412 i2c_int_disable(idev, ~MST_STATUS_TSS);
413 complete(&idev->msg_complete);
416 int mask = idev->last ? ~0 : ~MST_STATUS_TSS;
418 i2c_int_disable(idev, mask);
419 if (i2c_m_rd(idev->msg_r) && idev->msg_xfrd_r < idev->msg_r->len)
420 axxia_i2c_empty_rx_fifo(idev);
421 complete(&idev->msg_complete);
424 idev->msg_err = -ETIMEDOUT;
425 i2c_int_disable(idev, ~MST_STATUS_TSS);
426 complete(&idev->msg_complete);
431 writel(INT_MST, idev->base + INTERRUPT_STATUS);
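Lines 364-431 form the shared ISR. Its top-level dispatch reads INTERRUPT_STATUS to see whether the slave block, the master block, or both raised the line; only that dispatch is sketched here, with the master-status decoding (lines 376-426) left as a placeholder comment:

    static irqreturn_t axxia_i2c_isr(int irq, void *_dev)
    {
            struct axxia_i2c_dev *idev = _dev;
            irqreturn_t ret = IRQ_NONE;
            u32 status;

            /* Which block(s) raised the shared interrupt line? */
            status = readl(idev->base + INTERRUPT_STATUS);

            if (status & INT_SLV)
                    ret = axxia_i2c_slv_isr(idev);
            if (!(status & INT_MST))
                    return ret;

            status = readl(idev->base + MST_INT_STATUS);

            /* ... master handling elided: drain/fill FIFOs on RFL/TFL, record
             * msg_err on NAK/arbitration loss/timeout, complete the transfer
             * (lines 378-426 above) ... */

            /* Ack the master interrupt (line 431). */
            writel(INT_MST, idev->base + INTERRUPT_STATUS);
            return IRQ_HANDLED;
    }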
436 static void axxia_i2c_set_addr(struct axxia_i2c_dev *idev, struct i2c_msg *msg)
458 writel(addr_1, idev->base + MST_ADDR_1);
459 writel(addr_2, idev->base + MST_ADDR_2);
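axxia_i2c_set_addr() programs MST_ADDR_1/MST_ADDR_2 from the message address; a reconstruction, with the 7-/10-bit encoding taken from the I2C specification:

    static void axxia_i2c_set_addr(struct axxia_i2c_dev *idev, struct i2c_msg *msg)
    {
            u32 addr_1, addr_2;

            if (i2c_m_ten(msg)) {
                    /* 10-bit: addr_1 = 11110 | addr[9:8] | R/nW, addr_2 = addr[7:0] */
                    addr_1 = 0xF0 | ((msg->addr >> 7) & 0x06);
                    if (i2c_m_rd(msg))
                            addr_1 |= 1;
                    addr_2 = msg->addr & 0xFF;
            } else {
                    /* 7-bit: addr_1 = addr[6:0] | R/nW, addr_2 unused */
                    addr_1 = i2c_8bit_addr_from_msg(msg);
                    addr_2 = 0;
            }

            writel(addr_1, idev->base + MST_ADDR_1);
            writel(addr_2, idev->base + MST_ADDR_2);
    }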
466 static int axxia_i2c_handle_seq_nak(struct axxia_i2c_dev *idev)
471 if ((readl(idev->base + MST_COMMAND) & CMD_BUSY) == 0)
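A sequence that ends in a NAK can leave the controller busy for a while; line 471 polls CMD_BUSY until it clears. A sketch of that wait loop (the timeout value and sleep interval are assumptions):

    static int axxia_i2c_handle_seq_nak(struct axxia_i2c_dev *idev)
    {
            unsigned long timeout = jiffies + msecs_to_jiffies(250);

            /* Wait for the controller to drop CMD_BUSY after the NAK. */
            do {
                    if ((readl(idev->base + MST_COMMAND) & CMD_BUSY) == 0)
                            return 0;
                    usleep_range(1, 100);
            } while (time_before(jiffies, timeout));

            return -ETIMEDOUT;
    }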
479 static int axxia_i2c_xfer_seq(struct axxia_i2c_dev *idev, struct i2c_msg msgs[])
485 axxia_i2c_set_addr(idev, &msgs[0]);
487 writel(msgs[0].len, idev->base + MST_TX_XFER);
488 writel(rlen, idev->base + MST_RX_XFER);
490 idev->msg = &msgs[0];
491 idev->msg_r = &msgs[1];
492 idev->msg_xfrd = 0;
493 idev->msg_xfrd_r = 0;
494 idev->last = true;
495 axxia_i2c_fill_tx_fifo(idev);
497 writel(CMD_SEQUENCE, idev->base + MST_COMMAND);
499 reinit_completion(&idev->msg_complete);
500 i2c_int_enable(idev, int_mask);
502 time_left = wait_for_completion_timeout(&idev->msg_complete,
505 if (idev->msg_err == -ENXIO) {
506 if (axxia_i2c_handle_seq_nak(idev))
507 axxia_i2c_init(idev);
508 } else if (readl(idev->base + MST_COMMAND) & CMD_BUSY) {
509 dev_warn(idev->dev, "busy after xfer\n");
513 idev->msg_err = -ETIMEDOUT;
514 i2c_recover_bus(&idev->adapter);
515 axxia_i2c_init(idev);
518 if (unlikely(idev->msg_err) && idev->msg_err != -ENXIO)
519 axxia_i2c_init(idev);
521 return idev->msg_err;
524 static int axxia_i2c_xfer_msg(struct axxia_i2c_dev *idev, struct i2c_msg *msg,
532 idev->msg = msg;
533 idev->msg_r = msg;
534 idev->msg_xfrd = 0;
535 idev->msg_xfrd_r = 0;
536 idev->last = last;
537 reinit_completion(&idev->msg_complete);
539 axxia_i2c_set_addr(idev, msg);
551 writel(rx_xfer, idev->base + MST_RX_XFER);
552 writel(tx_xfer, idev->base + MST_TX_XFER);
556 else if (axxia_i2c_fill_tx_fifo(idev) != 0)
559 wt_value = WT_VALUE(readl(idev->base + WAIT_TIMER_CONTROL));
561 writel(wt_value, idev->base + WAIT_TIMER_CONTROL);
563 if (idev->msg_err)
567 writel(CMD_MANUAL, idev->base + MST_COMMAND);
570 writel(CMD_AUTO, idev->base + MST_COMMAND);
574 writel(WT_EN | wt_value, idev->base + WAIT_TIMER_CONTROL);
576 i2c_int_enable(idev, int_mask);
578 time_left = wait_for_completion_timeout(&idev->msg_complete,
581 i2c_int_disable(idev, int_mask);
583 if (readl(idev->base + MST_COMMAND) & CMD_BUSY)
584 dev_warn(idev->dev, "busy after xfer\n");
587 idev->msg_err = -ETIMEDOUT;
588 i2c_recover_bus(&idev->adapter);
589 axxia_i2c_init(idev);
593 if (unlikely(idev->msg_err) && idev->msg_err != -ENXIO &&
594 idev->msg_err != -ETIMEDOUT)
595 axxia_i2c_init(idev);
597 return idev->msg_err;
615 struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);
619 idev->msg_err = 0;
622 ret = axxia_i2c_xfer_seq(idev, msgs);
626 i2c_int_enable(idev, MST_STATUS_TSS);
629 ret = axxia_i2c_xfer_msg(idev, &msgs[i], i == (num - 1));
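axxia_i2c_xfer() at lines 615-629 is the adapter's master_xfer hook: reset msg_err, try the hardware sequence path for a suitable write-then-read pair, otherwise enable the MST_STATUS_TSS interrupt and run the messages one by one. A sketch; the sequence-eligibility predicate name and the return convention are assumptions:

    static int axxia_i2c_xfer(struct i2c_adapter *adap, struct i2c_msg msgs[], int num)
    {
            struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);
            int i;
            int ret = 0;

            idev->msg_err = 0;

            /* Write-then-read pairs can run as one hardware sequence (line 622). */
            if (axxia_i2c_sequence_ok(msgs, num)) {
                    ret = axxia_i2c_xfer_seq(idev, msgs);
                    return ret ? : num;
            }

            i2c_int_enable(idev, MST_STATUS_TSS);

            for (i = 0; ret == 0 && i < num; ++i)
                    ret = axxia_i2c_xfer_msg(idev, &msgs[i], i == (num - 1));

            return ret ? : num;
    }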
636 struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);
638 return !!(readl(idev->base + I2C_BUS_MONITOR) & BM_SCLS);
643 struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);
647 tmp = readl(idev->base + I2C_BUS_MONITOR) & BM_SDAC;
650 writel(tmp, idev->base + I2C_BUS_MONITOR);
655 struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);
657 return !!(readl(idev->base + I2C_BUS_MONITOR) & BM_SDAS);
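Lines 636-657 implement the bus-recovery callbacks wired up through idev->adapter.bus_recovery_info (line 789): SCL and SDA state is sampled from I2C_BUS_MONITOR, and SCL can be driven low through the same register. The function names and the BM_SCLC drive bit are assumptions; the register accesses match the listing:

    static int axxia_i2c_get_scl(struct i2c_adapter *adap)
    {
            struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);

            return !!(readl(idev->base + I2C_BUS_MONITOR) & BM_SCLS);
    }

    static void axxia_i2c_set_scl(struct i2c_adapter *adap, int val)
    {
            struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);
            u32 tmp;

            /* Preserve the SDA drive bit, change only SCL. */
            tmp = readl(idev->base + I2C_BUS_MONITOR) & BM_SDAC;
            if (!val)
                    tmp |= BM_SCLC;         /* assumed "drive SCL low" bit */
            writel(tmp, idev->base + I2C_BUS_MONITOR);
    }

    static int axxia_i2c_get_sda(struct i2c_adapter *adap)
    {
            struct axxia_i2c_dev *idev = i2c_get_adapdata(adap);

            return !!(readl(idev->base + I2C_BUS_MONITOR) & BM_SDAS);
    }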
676 struct axxia_i2c_dev *idev = i2c_get_adapdata(slave->adapter);
680 if (idev->slave)
683 idev->slave = slave;
686 writel(GLOBAL_MST_EN | GLOBAL_SLV_EN, idev->base + GLOBAL_CONTROL);
687 writel(INT_MST | INT_SLV, idev->base + INTERRUPT_ENABLE);
694 writel(SLV_RX_ACSA1, idev->base + SLV_RX_CTL);
695 writel(dec_ctl, idev->base + SLV_ADDR_DEC_CTL);
696 writel(slave->addr, idev->base + SLV_ADDR_1);
701 writel(slv_int_mask, idev->base + SLV_INT_ENABLE);
708 struct axxia_i2c_dev *idev = i2c_get_adapdata(slave->adapter);
711 writel(GLOBAL_MST_EN, idev->base + GLOBAL_CONTROL);
712 writel(INT_MST, idev->base + INTERRUPT_ENABLE);
714 synchronize_irq(idev->irq);
716 idev->slave = NULL;
736 struct axxia_i2c_dev *idev = NULL;
740 idev = devm_kzalloc(&pdev->dev, sizeof(*idev), GFP_KERNEL);
741 if (!idev)
748 idev->irq = platform_get_irq(pdev, 0);
749 if (idev->irq < 0)
750 return idev->irq;
752 idev->i2c_clk = devm_clk_get(&pdev->dev, "i2c");
753 if (IS_ERR(idev->i2c_clk)) {
755 return PTR_ERR(idev->i2c_clk);
758 idev->base = base;
759 idev->dev = &pdev->dev;
760 init_completion(&idev->msg_complete);
762 of_property_read_u32(np, "clock-frequency", &idev->bus_clk_rate);
763 if (idev->bus_clk_rate == 0)
764 idev->bus_clk_rate = I2C_MAX_STANDARD_MODE_FREQ; /* default clock rate */
766 ret = clk_prepare_enable(idev->i2c_clk);
772 ret = axxia_i2c_init(idev);
778 ret = devm_request_irq(&pdev->dev, idev->irq, axxia_i2c_isr, 0,
779 pdev->name, idev);
781 dev_err(&pdev->dev, "failed to claim IRQ%d\n", idev->irq);
785 i2c_set_adapdata(&idev->adapter, idev);
786 strlcpy(idev->adapter.name, pdev->name, sizeof(idev->adapter.name));
787 idev->adapter.owner = THIS_MODULE;
788 idev->adapter.algo = &axxia_i2c_algo;
789 idev->adapter.bus_recovery_info = &axxia_i2c_recovery_info;
790 idev->adapter.quirks = &axxia_i2c_quirks;
791 idev->adapter.dev.parent = &pdev->dev;
792 idev->adapter.dev.of_node = pdev->dev.of_node;
794 platform_set_drvdata(pdev, idev);
796 ret = i2c_add_adapter(&idev->adapter);
803 clk_disable_unprepare(idev->i2c_clk);
809 struct axxia_i2c_dev *idev = platform_get_drvdata(pdev);
811 clk_disable_unprepare(idev->i2c_clk);
812 i2c_del_adapter(&idev->adapter);