Lines Matching refs:dsi
111 #define REG_GET(dsi, idx, start, end) \
112 FLD_GET(dsi_read_reg(dsi, idx), start, end)
114 #define REG_FLD_MOD(dsi, idx, val, start, end) \
115 dsi_write_reg(dsi, idx, FLD_MOD(dsi_read_reg(dsi, idx), val, start, end))
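Note: REG_GET and REG_FLD_MOD above are thin wrappers over the generic FLD_* bit-field helpers shared across omapdss. For reference, a sketch of those helpers as conventionally defined in the driver's common header (start is the high bit, end the low bit, matching the argument order used throughout this listing):

	#define FLD_MASK(start, end)	(((1 << ((start) - (end) + 1)) - 1) << (end))
	#define FLD_VAL(val, start, end) (((val) << (end)) & FLD_MASK(start, end))
	#define FLD_GET(val, start, end) (((val) & FLD_MASK(start, end)) >> (end))
	#define FLD_MOD(orig, val, start, end) \
		(((orig) & ~FLD_MASK(start, end)) | FLD_VAL(val, start, end))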
207 static int dsi_display_init_dispc(struct dsi_data *dsi);
208 static void dsi_display_uninit_dispc(struct dsi_data *dsi);
210 static int dsi_vc_send_null(struct dsi_data *dsi, int channel);
273 struct dsi_data *dsi;
421 struct dsi_data *dsi;
435 static inline void dsi_write_reg(struct dsi_data *dsi,
441 case DSI_PROTO: base = dsi->proto_base; break;
442 case DSI_PHY: base = dsi->phy_base; break;
443 case DSI_PLL: base = dsi->pll_base; break;
450 static inline u32 dsi_read_reg(struct dsi_data *dsi, const struct dsi_reg idx)
455 case DSI_PROTO: base = dsi->proto_base; break;
456 case DSI_PHY: base = dsi->phy_base; break;
457 case DSI_PLL: base = dsi->pll_base; break;
466 struct dsi_data *dsi = to_dsi_data(dssdev);
468 down(&dsi->bus_lock);
473 struct dsi_data *dsi = to_dsi_data(dssdev);
475 up(&dsi->bus_lock);
478 static bool dsi_bus_is_locked(struct dsi_data *dsi)
480 return dsi->bus_lock.count == 0;
488 static inline bool wait_for_bit_change(struct dsi_data *dsi,
499 if (REG_GET(dsi, idx, bitnum, bitnum) == value)
506 if (REG_GET(dsi, idx, bitnum, bitnum) == value)
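The two REG_GET probes above are the fast and slow phases of the bit-poll helper. A minimal sketch of the whole function; the exact spin count and timeout (100 iterations, ~500 ms) are assumptions, not read from the source:

	static inline bool wait_for_bit_change(struct dsi_data *dsi,
					       const struct dsi_reg idx,
					       int bitnum, int value)
	{
		unsigned long timeout;
		int t = 100;

		/* phase 1: short busy-loop in case the bit flips immediately */
		while (t-- > 0)
			if (REG_GET(dsi, idx, bitnum, bitnum) == value)
				return true;

		/* phase 2: sleep-poll about once per ms until the timeout */
		timeout = jiffies + msecs_to_jiffies(500);
		while (time_before(jiffies, timeout)) {
			if (REG_GET(dsi, idx, bitnum, bitnum) == value)
				return true;
			usleep_range(1000, 1500);
		}

		return false;
	}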
534 static void dsi_perf_mark_setup(struct dsi_data *dsi)
536 dsi->perf_setup_time = ktime_get();
539 static void dsi_perf_mark_start(struct dsi_data *dsi)
541 dsi->perf_start_time = ktime_get();
544 static void dsi_perf_show(struct dsi_data *dsi, const char *name)
555 setup_time = ktime_sub(dsi->perf_start_time, dsi->perf_setup_time);
560 trans_time = ktime_sub(t, dsi->perf_start_time);
567 total_bytes = dsi->update_bytes;
579 static inline void dsi_perf_mark_setup(struct dsi_data *dsi)
583 static inline void dsi_perf_mark_start(struct dsi_data *dsi)
587 static inline void dsi_perf_show(struct dsi_data *dsi, const char *name)
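The doubled dsi_perf_* definitions (bodies above, empty inlines here) point at a compile-time switch; a sketch of the likely arrangement, with the guard macro name being a guess:

	#ifdef DSI_PERF_MEASURE			/* guard name is an assumption */
	static void dsi_perf_mark_setup(struct dsi_data *dsi)
	{
		dsi->perf_setup_time = ktime_get();
	}
	/* ... dsi_perf_mark_start() and dsi_perf_show() with real bodies ... */
	#else
	static inline void dsi_perf_mark_setup(struct dsi_data *dsi) { }
	static inline void dsi_perf_mark_start(struct dsi_data *dsi) { }
	static inline void dsi_perf_show(struct dsi_data *dsi, const char *name) { }
	#endif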
684 static void dsi_collect_irq_stats(struct dsi_data *dsi, u32 irqstatus,
689 spin_lock(&dsi->irq_stats_lock);
691 dsi->irq_stats.irq_count++;
692 dss_collect_irq_stats(irqstatus, dsi->irq_stats.dsi_irqs);
695 dss_collect_irq_stats(vcstatus[i], dsi->irq_stats.vc_irqs[i]);
697 dss_collect_irq_stats(ciostatus, dsi->irq_stats.cio_irqs);
699 spin_unlock(&dsi->irq_stats_lock);
702 #define dsi_collect_irq_stats(dsi, irqstatus, vcstatus, ciostatus)
707 static void dsi_handle_irq_errors(struct dsi_data *dsi, u32 irqstatus,
715 spin_lock(&dsi->errors_lock);
716 dsi->errors |= irqstatus & DSI_IRQ_ERROR_MASK;
717 spin_unlock(&dsi->errors_lock);
778 struct dsi_data *dsi = arg;
782 if (!dsi->is_enabled)
785 spin_lock(&dsi->irq_lock);
787 irqstatus = dsi_read_reg(dsi, DSI_IRQSTATUS);
791 spin_unlock(&dsi->irq_lock);
795 dsi_write_reg(dsi, DSI_IRQSTATUS, irqstatus & ~DSI_IRQ_CHANNEL_MASK);
797 dsi_read_reg(dsi, DSI_IRQSTATUS);
805 vcstatus[i] = dsi_read_reg(dsi, DSI_VC_IRQSTATUS(i));
807 dsi_write_reg(dsi, DSI_VC_IRQSTATUS(i), vcstatus[i]);
809 dsi_read_reg(dsi, DSI_VC_IRQSTATUS(i));
813 ciostatus = dsi_read_reg(dsi, DSI_COMPLEXIO_IRQ_STATUS);
815 dsi_write_reg(dsi, DSI_COMPLEXIO_IRQ_STATUS, ciostatus);
817 dsi_read_reg(dsi, DSI_COMPLEXIO_IRQ_STATUS);
824 del_timer(&dsi->te_timer);
829 memcpy(&dsi->isr_tables_copy, &dsi->isr_tables,
830 sizeof(dsi->isr_tables));
832 spin_unlock(&dsi->irq_lock);
834 dsi_handle_isrs(&dsi->isr_tables_copy, irqstatus, vcstatus, ciostatus);
836 dsi_handle_irq_errors(dsi, irqstatus, vcstatus, ciostatus);
838 dsi_collect_irq_stats(dsi, irqstatus, vcstatus, ciostatus);
843 /* dsi->irq_lock has to be locked by the caller */
844 static void _omap_dsi_configure_irqs(struct dsi_data *dsi,
867 old_mask = dsi_read_reg(dsi, enable_reg);
869 dsi_write_reg(dsi, status_reg, (mask ^ old_mask) & mask);
870 dsi_write_reg(dsi, enable_reg, mask);
873 dsi_read_reg(dsi, enable_reg);
874 dsi_read_reg(dsi, status_reg);
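Both the interrupt handler and _omap_dsi_configure_irqs() end with bare dsi_read_reg() calls. That is the usual ack-and-flush idiom for write-1-to-clear status registers behind a posted-write interconnect; a simplified sketch:

	u32 stat = dsi_read_reg(dsi, DSI_IRQSTATUS);	/* latch pending bits */
	dsi_write_reg(dsi, DSI_IRQSTATUS, stat);	/* write 1s back to ack */
	dsi_read_reg(dsi, DSI_IRQSTATUS);		/* read-back flushes the posted
							 * write so the IRQ line drops
							 * before the handler returns */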
877 /* dsi->irq_lock has to be locked by the caller */
878 static void _omap_dsi_set_irqs(struct dsi_data *dsi)
884 _omap_dsi_configure_irqs(dsi, dsi->isr_tables.isr_table,
885 ARRAY_SIZE(dsi->isr_tables.isr_table), mask,
889 /* dsi->irq_lock has to be locked by the caller */
890 static void _omap_dsi_set_irqs_vc(struct dsi_data *dsi, int vc)
892 _omap_dsi_configure_irqs(dsi, dsi->isr_tables.isr_table_vc[vc],
893 ARRAY_SIZE(dsi->isr_tables.isr_table_vc[vc]),
898 /* dsi->irq_lock has to be locked by the caller */
899 static void _omap_dsi_set_irqs_cio(struct dsi_data *dsi)
901 _omap_dsi_configure_irqs(dsi, dsi->isr_tables.isr_table_cio,
902 ARRAY_SIZE(dsi->isr_tables.isr_table_cio),
907 static void _dsi_initialize_irq(struct dsi_data *dsi)
912 spin_lock_irqsave(&dsi->irq_lock, flags);
914 memset(&dsi->isr_tables, 0, sizeof(dsi->isr_tables));
916 _omap_dsi_set_irqs(dsi);
918 _omap_dsi_set_irqs_vc(dsi, vc);
919 _omap_dsi_set_irqs_cio(dsi);
921 spin_unlock_irqrestore(&dsi->irq_lock, flags);
980 static int dsi_register_isr(struct dsi_data *dsi, omap_dsi_isr_t isr,
986 spin_lock_irqsave(&dsi->irq_lock, flags);
988 r = _dsi_register_isr(isr, arg, mask, dsi->isr_tables.isr_table,
989 ARRAY_SIZE(dsi->isr_tables.isr_table));
992 _omap_dsi_set_irqs(dsi);
994 spin_unlock_irqrestore(&dsi->irq_lock, flags);
999 static int dsi_unregister_isr(struct dsi_data *dsi, omap_dsi_isr_t isr,
1005 spin_lock_irqsave(&dsi->irq_lock, flags);
1007 r = _dsi_unregister_isr(isr, arg, mask, dsi->isr_tables.isr_table,
1008 ARRAY_SIZE(dsi->isr_tables.isr_table));
1011 _omap_dsi_set_irqs(dsi);
1013 spin_unlock_irqrestore(&dsi->irq_lock, flags);
1018 static int dsi_register_isr_vc(struct dsi_data *dsi, int channel,
1024 spin_lock_irqsave(&dsi->irq_lock, flags);
1027 dsi->isr_tables.isr_table_vc[channel],
1028 ARRAY_SIZE(dsi->isr_tables.isr_table_vc[channel]));
1031 _omap_dsi_set_irqs_vc(dsi, channel);
1033 spin_unlock_irqrestore(&dsi->irq_lock, flags);
1038 static int dsi_unregister_isr_vc(struct dsi_data *dsi, int channel,
1044 spin_lock_irqsave(&dsi->irq_lock, flags);
1047 dsi->isr_tables.isr_table_vc[channel],
1048 ARRAY_SIZE(dsi->isr_tables.isr_table_vc[channel]));
1051 _omap_dsi_set_irqs_vc(dsi, channel);
1053 spin_unlock_irqrestore(&dsi->irq_lock, flags);
1058 static int dsi_register_isr_cio(struct dsi_data *dsi, omap_dsi_isr_t isr,
1064 spin_lock_irqsave(&dsi->irq_lock, flags);
1066 r = _dsi_register_isr(isr, arg, mask, dsi->isr_tables.isr_table_cio,
1067 ARRAY_SIZE(dsi->isr_tables.isr_table_cio));
1070 _omap_dsi_set_irqs_cio(dsi);
1072 spin_unlock_irqrestore(&dsi->irq_lock, flags);
1077 static int dsi_unregister_isr_cio(struct dsi_data *dsi, omap_dsi_isr_t isr,
1083 spin_lock_irqsave(&dsi->irq_lock, flags);
1085 r = _dsi_unregister_isr(isr, arg, mask, dsi->isr_tables.isr_table_cio,
1086 ARRAY_SIZE(dsi->isr_tables.isr_table_cio));
1089 _omap_dsi_set_irqs_cio(dsi);
1091 spin_unlock_irqrestore(&dsi->irq_lock, flags);
1096 static u32 dsi_get_errors(struct dsi_data *dsi)
1101 spin_lock_irqsave(&dsi->errors_lock, flags);
1102 e = dsi->errors;
1103 dsi->errors = 0;
1104 spin_unlock_irqrestore(&dsi->errors_lock, flags);
1108 static int dsi_runtime_get(struct dsi_data *dsi)
1114 r = pm_runtime_get_sync(dsi->dev);
1119 static void dsi_runtime_put(struct dsi_data *dsi)
1125 r = pm_runtime_put_sync(dsi->dev);
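A sketch of what these runtime-PM wrappers conventionally amount to in omapdss (the WARN_ON policy shown is my assumption):

	static int dsi_runtime_get(struct dsi_data *dsi)
	{
		int r = pm_runtime_get_sync(dsi->dev);

		WARN_ON(r < 0);
		return r < 0 ? r : 0;
	}

	static void dsi_runtime_put(struct dsi_data *dsi)
	{
		int r = pm_runtime_put_sync(dsi->dev);

		WARN_ON(r < 0 && r != -ENOSYS);
	}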
1129 static void _dsi_print_reset_status(struct dsi_data *dsi)
1137 l = dsi_read_reg(dsi, DSI_DSIPHY_CFG5);
1139 if (dsi->data->quirks & DSI_QUIRK_REVERSE_TXCLKESC) {
1150 FLD_GET(dsi_read_reg(dsi, DSI_##fld), start, end)
1165 static inline int dsi_if_enable(struct dsi_data *dsi, bool enable)
1170 REG_FLD_MOD(dsi, DSI_CTRL, enable, 0, 0); /* IF_EN */
1172 if (!wait_for_bit_change(dsi, DSI_CTRL, 0, enable)) {
1180 static unsigned long dsi_get_pll_hsdiv_dispc_rate(struct dsi_data *dsi)
1182 return dsi->pll.cinfo.clkout[HSDIV_DISPC];
1185 static unsigned long dsi_get_pll_hsdiv_dsi_rate(struct dsi_data *dsi)
1187 return dsi->pll.cinfo.clkout[HSDIV_DSI];
1190 static unsigned long dsi_get_txbyteclkhs(struct dsi_data *dsi)
1192 return dsi->pll.cinfo.clkdco / 16;
1195 static unsigned long dsi_fclk_rate(struct dsi_data *dsi)
1200 source = dss_get_dsi_clk_source(dsi->dss, dsi->module_id);
1203 r = clk_get_rate(dsi->dss_clk);
1206 r = dsi_get_pll_hsdiv_dsi_rate(dsi);
1231 static int dsi_set_lp_clk_divisor(struct dsi_data *dsi)
1236 unsigned int lpdiv_max = dsi->data->max_pll_lpdiv;
1239 lp_clk_div = dsi->user_lp_cinfo.lp_clk_div;
1244 dsi_fclk = dsi_fclk_rate(dsi);
1249 dsi->current_lp_cinfo.lp_clk = lp_clk;
1250 dsi->current_lp_cinfo.lp_clk_div = lp_clk_div;
1253 REG_FLD_MOD(dsi, DSI_CLK_CTRL, lp_clk_div, 12, 0);
1256 REG_FLD_MOD(dsi, DSI_CLK_CTRL, dsi_fclk > 30000000 ? 1 : 0, 21, 21);
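Between the fclk lookup and the stored result above sits the LP clock computation itself; a worked sketch (the divide-by-two reflects that one LP clock period spans two counted edges, per my reading of the driver):

	unsigned long dsi_fclk = dsi_fclk_rate(dsi);		/* e.g. ~173 MHz */
	unsigned long lp_clk = dsi_fclk / 2 / lp_clk_div;	/* e.g. /2/9 ~= 9.6 MHz */
	/* LP/escape-mode clocking is commonly kept at or below ~10 MHz (assumption) */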
1261 static void dsi_enable_scp_clk(struct dsi_data *dsi)
1263 if (dsi->scp_clk_refcount++ == 0)
1264 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 1, 14, 14); /* CIO_CLK_ICG */
1267 static void dsi_disable_scp_clk(struct dsi_data *dsi)
1269 WARN_ON(dsi->scp_clk_refcount == 0);
1270 if (--dsi->scp_clk_refcount == 0)
1271 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 0, 14, 14); /* CIO_CLK_ICG */
1281 static int dsi_pll_power(struct dsi_data *dsi, enum dsi_pll_power_state state)
1286 if ((dsi->data->quirks & DSI_QUIRK_PLL_PWR_BUG) &&
1291 REG_FLD_MOD(dsi, DSI_CLK_CTRL, state, 31, 30);
1294 while (FLD_GET(dsi_read_reg(dsi, DSI_CLK_CTRL), 29, 28) != state) {
1307 static void dsi_pll_calc_dsi_fck(struct dsi_data *dsi,
1312 max_dsi_fck = dsi->data->max_fck_freq;
1320 struct dsi_data *dsi = container_of(pll, struct dsi_data, pll);
1325 r = dsi_runtime_get(dsi);
1332 dsi_enable_scp_clk(dsi);
1334 r = regulator_enable(dsi->vdds_dsi_reg);
1339 dispc_pck_free_enable(dsi->dss->dispc, 1);
1341 if (!wait_for_bit_change(dsi, DSI_PLL_STATUS, 0, 1)) {
1344 dispc_pck_free_enable(dsi->dss->dispc, 0);
1350 dispc_pck_free_enable(dsi->dss->dispc, 0);
1352 r = dsi_pll_power(dsi, DSI_PLL_POWER_ON_ALL);
1361 regulator_disable(dsi->vdds_dsi_reg);
1363 dsi_disable_scp_clk(dsi);
1364 dsi_runtime_put(dsi);
1370 struct dsi_data *dsi = container_of(pll, struct dsi_data, pll);
1372 dsi_pll_power(dsi, DSI_PLL_POWER_OFF);
1374 regulator_disable(dsi->vdds_dsi_reg);
1376 dsi_disable_scp_clk(dsi);
1377 dsi_runtime_put(dsi);
1384 struct dsi_data *dsi = s->private;
1385 struct dss_pll_clock_info *cinfo = &dsi->pll.cinfo;
1387 int dsi_module = dsi->module_id;
1388 struct dss_pll *pll = &dsi->pll;
1390 dispc_clk_src = dss_get_dispc_clk_source(dsi->dss);
1391 dsi_clk_src = dss_get_dsi_clk_source(dsi->dss, dsi_module);
1393 if (dsi_runtime_get(dsi))
1398 seq_printf(s, "dsi pll clkin\t%lu\n", clk_get_rate(pll->clkin));
1425 seq_printf(s, "dsi fclk source = %s\n",
1428 seq_printf(s, "DSI_FCLK\t%lu\n", dsi_fclk_rate(dsi));
1433 seq_printf(s, "TxByteClkHS\t%lu\n", dsi_get_txbyteclkhs(dsi));
1435 seq_printf(s, "LP_CLK\t\t%lu\n", dsi->current_lp_cinfo.lp_clk);
1437 dsi_runtime_put(dsi);
1445 struct dsi_data *dsi = s->private;
1453 spin_lock_irqsave(&dsi->irq_stats_lock, flags);
1455 *stats = dsi->irq_stats;
1456 memset(&dsi->irq_stats, 0, sizeof(dsi->irq_stats));
1457 dsi->irq_stats.last_reset = jiffies;
1459 spin_unlock_irqrestore(&dsi->irq_stats_lock, flags);
1468 seq_printf(s, "-- DSI%d interrupts --\n", dsi->module_id + 1);
1542 struct dsi_data *dsi = s->private;
1544 if (dsi_runtime_get(dsi))
1546 dsi_enable_scp_clk(dsi);
1548 #define DUMPREG(r) seq_printf(s, "%-35s %08x\n", #r, dsi_read_reg(dsi, r))
1620 dsi_disable_scp_clk(dsi);
1621 dsi_runtime_put(dsi);
1632 static int dsi_cio_power(struct dsi_data *dsi, enum dsi_cio_power_state state)
1637 REG_FLD_MOD(dsi, DSI_COMPLEXIO_CFG1, state, 28, 27);
1640 while (FLD_GET(dsi_read_reg(dsi, DSI_COMPLEXIO_CFG1),
1653 static unsigned int dsi_get_line_buf_size(struct dsi_data *dsi)
1661 if (!(dsi->data->quirks & DSI_QUIRK_GNQ))
1664 val = REG_GET(dsi, DSI_GNQ, 14, 12); /* VP1_LINE_BUFFER_SIZE */
1687 static int dsi_set_lane_config(struct dsi_data *dsi)
1700 r = dsi_read_reg(dsi, DSI_COMPLEXIO_CFG1);
1702 for (i = 0; i < dsi->num_lanes_used; ++i) {
1707 for (t = 0; t < dsi->num_lanes_supported; ++t)
1708 if (dsi->lanes[t].function == functions[i])
1711 if (t == dsi->num_lanes_supported)
1715 polarity = dsi->lanes[t].polarity;
1722 for (; i < dsi->num_lanes_supported; ++i) {
1729 dsi_write_reg(dsi, DSI_COMPLEXIO_CFG1, r);
1734 static inline unsigned int ns2ddr(struct dsi_data *dsi, unsigned int ns)
1737 unsigned long ddr_clk = dsi->pll.cinfo.clkdco / 4;
1742 static inline unsigned int ddr2ns(struct dsi_data *dsi, unsigned int ddr)
1744 unsigned long ddr_clk = dsi->pll.cinfo.clkdco / 4;
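A plausible completion of the two converters, consistent with the DCO/4 DDR clock visible above and with round-up behaviour on the ns-to-ticks path:

	static inline unsigned int ns2ddr(struct dsi_data *dsi, unsigned int ns)
	{
		unsigned long ddr_clk = dsi->pll.cinfo.clkdco / 4;	/* Hz */

		/* ticks = ns * MHz / 1000, rounded up */
		return (ns * (ddr_clk / 1000 / 1000) + 999) / 1000;
	}

	static inline unsigned int ddr2ns(struct dsi_data *dsi, unsigned int ddr)
	{
		unsigned long ddr_clk = dsi->pll.cinfo.clkdco / 4;

		return ddr * 1000 * 1000 / (ddr_clk / 1000);
	}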
1749 static void dsi_cio_timings(struct dsi_data *dsi)
1761 ths_prepare = ns2ddr(dsi, 70) + 2;
1764 ths_prepare_ths_zero = ns2ddr(dsi, 175) + 2;
1767 ths_trail = ns2ddr(dsi, 60) + 5;
1770 ths_exit = ns2ddr(dsi, 145);
1773 tlpx_half = ns2ddr(dsi, 25);
1776 tclk_trail = ns2ddr(dsi, 60) + 2;
1779 tclk_prepare = ns2ddr(dsi, 65);
1782 tclk_zero = ns2ddr(dsi, 260);
1785 ths_prepare, ddr2ns(dsi, ths_prepare),
1786 ths_prepare_ths_zero, ddr2ns(dsi, ths_prepare_ths_zero));
1788 ths_trail, ddr2ns(dsi, ths_trail),
1789 ths_exit, ddr2ns(dsi, ths_exit));
1793 tlpx_half, ddr2ns(dsi, tlpx_half),
1794 tclk_trail, ddr2ns(dsi, tclk_trail),
1795 tclk_zero, ddr2ns(dsi, tclk_zero));
1797 tclk_prepare, ddr2ns(dsi, tclk_prepare));
1801 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG0);
1806 dsi_write_reg(dsi, DSI_DSIPHY_CFG0, r);
1808 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG1);
1813 if (dsi->data->quirks & DSI_QUIRK_PHY_DCC) {
1819 dsi_write_reg(dsi, DSI_DSIPHY_CFG1, r);
1821 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG2);
1823 dsi_write_reg(dsi, DSI_DSIPHY_CFG2, r);
1827 static void dsi_cio_enable_lane_override(struct dsi_data *dsi,
1833 u8 lptxscp_start = dsi->num_lanes_supported == 3 ? 22 : 26;
1837 for (i = 0; i < dsi->num_lanes_supported; ++i) {
1838 unsigned int p = dsi->lanes[i].polarity;
1859 REG_FLD_MOD(dsi, DSI_DSIPHY_CFG10, l, lptxscp_start, 17);
1864 REG_FLD_MOD(dsi, DSI_DSIPHY_CFG10, 1, 27, 27);
1867 static void dsi_cio_disable_lane_override(struct dsi_data *dsi)
1870 REG_FLD_MOD(dsi, DSI_DSIPHY_CFG10, 0, 27, 27); /* ENLPTXSCPDAT */
1873 REG_FLD_MOD(dsi, DSI_DSIPHY_CFG10, 0, 22, 17);
1876 static int dsi_cio_wait_tx_clk_esc_reset(struct dsi_data *dsi)
1884 if (dsi->data->quirks & DSI_QUIRK_REVERSE_TXCLKESC)
1889 for (i = 0; i < dsi->num_lanes_supported; ++i)
1890 in_use[i] = dsi->lanes[i].function != DSI_LANE_UNUSED;
1897 l = dsi_read_reg(dsi, DSI_DSIPHY_CFG5);
1900 for (i = 0; i < dsi->num_lanes_supported; ++i) {
1905 if (ok == dsi->num_lanes_supported)
1909 for (i = 0; i < dsi->num_lanes_supported; ++i) {
1924 static unsigned int dsi_get_lane_mask(struct dsi_data *dsi)
1929 for (i = 0; i < dsi->num_lanes_supported; ++i) {
1930 if (dsi->lanes[i].function != DSI_LANE_UNUSED)
1949 static int dsi_omap4_mux_pads(struct dsi_data *dsi, unsigned int lanes)
1954 if (dsi->module_id == 0) {
1959 } else if (dsi->module_id == 1) {
1968 return regmap_update_bits(dsi->syscon, OMAP4_DSIPHY_SYSCON_OFFSET,
1981 static int dsi_omap5_mux_pads(struct dsi_data *dsi, unsigned int lanes)
1985 if (dsi->module_id == 0)
1987 else if (dsi->module_id == 1)
1992 return regmap_update_bits(dsi->syscon, OMAP5_DSIPHY_SYSCON_OFFSET,
1997 static int dsi_enable_pads(struct dsi_data *dsi, unsigned int lane_mask)
1999 if (dsi->data->model == DSI_MODEL_OMAP4)
2000 return dsi_omap4_mux_pads(dsi, lane_mask);
2001 if (dsi->data->model == DSI_MODEL_OMAP5)
2002 return dsi_omap5_mux_pads(dsi, lane_mask);
2006 static void dsi_disable_pads(struct dsi_data *dsi)
2008 if (dsi->data->model == DSI_MODEL_OMAP4)
2009 dsi_omap4_mux_pads(dsi, 0);
2010 else if (dsi->data->model == DSI_MODEL_OMAP5)
2011 dsi_omap5_mux_pads(dsi, 0);
2014 static int dsi_cio_init(struct dsi_data *dsi)
2021 r = dsi_enable_pads(dsi, dsi_get_lane_mask(dsi));
2025 dsi_enable_scp_clk(dsi);
2030 dsi_read_reg(dsi, DSI_DSIPHY_CFG5);
2032 if (!wait_for_bit_change(dsi, DSI_DSIPHY_CFG5, 30, 1)) {
2038 r = dsi_set_lane_config(dsi);
2043 l = dsi_read_reg(dsi, DSI_TIMING1);
2048 dsi_write_reg(dsi, DSI_TIMING1, l);
2050 if (dsi->ulps_enabled) {
2067 for (i = 0; i < dsi->num_lanes_supported; ++i) {
2068 if (dsi->lanes[i].function == DSI_LANE_UNUSED)
2073 dsi_cio_enable_lane_override(dsi, mask_p, 0);
2076 r = dsi_cio_power(dsi, DSI_COMPLEXIO_POWER_ON);
2080 if (!wait_for_bit_change(dsi, DSI_COMPLEXIO_CFG1, 29, 1)) {
2086 dsi_if_enable(dsi, true);
2087 dsi_if_enable(dsi, false);
2088 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 1, 20, 20); /* LP_CLK_ENABLE */
2090 r = dsi_cio_wait_tx_clk_esc_reset(dsi);
2094 if (dsi->ulps_enabled) {
2102 dsi_cio_disable_lane_override(dsi);
2106 REG_FLD_MOD(dsi, DSI_TIMING1, 0, 15, 15);
2108 dsi_cio_timings(dsi);
2110 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) {
2112 REG_FLD_MOD(dsi, DSI_CLK_CTRL,
2113 dsi->vm_timings.ddr_clk_always_on, 13, 13);
2116 dsi->ulps_enabled = false;
2123 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 0, 20, 20); /* LP_CLK_ENABLE */
2125 dsi_cio_power(dsi, DSI_COMPLEXIO_POWER_OFF);
2127 if (dsi->ulps_enabled)
2128 dsi_cio_disable_lane_override(dsi);
2130 dsi_disable_scp_clk(dsi);
2131 dsi_disable_pads(dsi);
2135 static void dsi_cio_uninit(struct dsi_data *dsi)
2138 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 0, 13, 13);
2140 dsi_cio_power(dsi, DSI_COMPLEXIO_POWER_OFF);
2141 dsi_disable_scp_clk(dsi);
2142 dsi_disable_pads(dsi);
2145 static void dsi_config_tx_fifo(struct dsi_data *dsi,
2153 dsi->vc[0].tx_fifo_size = size1;
2154 dsi->vc[1].tx_fifo_size = size2;
2155 dsi->vc[2].tx_fifo_size = size3;
2156 dsi->vc[3].tx_fifo_size = size4;
2160 int size = dsi->vc[i].tx_fifo_size;
2174 dsi_write_reg(dsi, DSI_TX_FIFO_VC_SIZE, r);
2177 static void dsi_config_rx_fifo(struct dsi_data *dsi,
2185 dsi->vc[0].rx_fifo_size = size1;
2186 dsi->vc[1].rx_fifo_size = size2;
2187 dsi->vc[2].rx_fifo_size = size3;
2188 dsi->vc[3].rx_fifo_size = size4;
2192 int size = dsi->vc[i].rx_fifo_size;
2206 dsi_write_reg(dsi, DSI_RX_FIFO_VC_SIZE, r);
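Both FIFO setters pack four (start, size) pairs into one 32-bit register. A sketch of the packing loop, assuming bits 2:0 hold the start block and bits 7:4 the size within each per-VC byte:

	u32 r = 0;
	int add = 0, i;

	for (i = 0; i < 4; i++) {
		int size = dsi->vc[i].tx_fifo_size;

		/* start addresses accumulate so the four FIFOs tile the shared RAM */
		r |= (FLD_VAL(add, 2, 0) | FLD_VAL(size, 7, 4)) << (8 * i);
		add += size;
	}
	dsi_write_reg(dsi, DSI_TX_FIFO_VC_SIZE, r);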
2209 static int dsi_force_tx_stop_mode_io(struct dsi_data *dsi)
2213 r = dsi_read_reg(dsi, DSI_TIMING1);
2215 dsi_write_reg(dsi, DSI_TIMING1, r);
2217 if (!wait_for_bit_change(dsi, DSI_TIMING1, 15, 0)) {
2225 static bool dsi_vc_is_enabled(struct dsi_data *dsi, int channel)
2227 return REG_GET(dsi, DSI_VC_CTRL(channel), 0, 0);
2234 struct dsi_data *dsi = vp_data->dsi;
2235 const int channel = dsi->update_channel;
2236 u8 bit = dsi->te_enabled ? 30 : 31;
2238 if (REG_GET(dsi, DSI_VC_TE(channel), bit, bit) == 0)
2242 static int dsi_sync_vc_vp(struct dsi_data *dsi, int channel)
2246 .dsi = dsi,
2252 bit = dsi->te_enabled ? 30 : 31;
2254 r = dsi_register_isr_vc(dsi, channel, dsi_packet_sent_handler_vp,
2260 if (REG_GET(dsi, DSI_VC_TE(channel), bit, bit)) {
2269 dsi_unregister_isr_vc(dsi, channel, dsi_packet_sent_handler_vp,
2274 dsi_unregister_isr_vc(dsi, channel, dsi_packet_sent_handler_vp,
2284 struct dsi_data *dsi = l4_data->dsi;
2285 const int channel = dsi->update_channel;
2287 if (REG_GET(dsi, DSI_VC_CTRL(channel), 5, 5) == 0)
2291 static int dsi_sync_vc_l4(struct dsi_data *dsi, int channel)
2295 .dsi = dsi,
2300 r = dsi_register_isr_vc(dsi, channel, dsi_packet_sent_handler_l4,
2306 if (REG_GET(dsi, DSI_VC_CTRL(channel), 5, 5)) {
2315 dsi_unregister_isr_vc(dsi, channel, dsi_packet_sent_handler_l4,
2320 dsi_unregister_isr_vc(dsi, channel, dsi_packet_sent_handler_l4,
2326 static int dsi_sync_vc(struct dsi_data *dsi, int channel)
2328 WARN_ON(!dsi_bus_is_locked(dsi));
2332 if (!dsi_vc_is_enabled(dsi, channel))
2335 switch (dsi->vc[channel].source) {
2337 return dsi_sync_vc_vp(dsi, channel);
2339 return dsi_sync_vc_l4(dsi, channel);
2346 static int dsi_vc_enable(struct dsi_data *dsi, int channel, bool enable)
2353 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), enable, 0, 0);
2355 if (!wait_for_bit_change(dsi, DSI_VC_CTRL(channel), 0, enable)) {
2363 static void dsi_vc_initial_config(struct dsi_data *dsi, int channel)
2369 r = dsi_read_reg(dsi, DSI_VC_CTRL(channel));
2382 if (dsi->data->quirks & DSI_QUIRK_VC_OCP_WIDTH)
2388 dsi_write_reg(dsi, DSI_VC_CTRL(channel), r);
2390 dsi->vc[channel].source = DSI_VC_SOURCE_L4;
2393 static int dsi_vc_config_source(struct dsi_data *dsi, int channel,
2396 if (dsi->vc[channel].source == source)
2401 dsi_sync_vc(dsi, channel);
2403 dsi_vc_enable(dsi, channel, 0);
2406 if (!wait_for_bit_change(dsi, DSI_VC_CTRL(channel), 15, 0)) {
2412 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), source, 1, 1);
2415 if (dsi->data->quirks & DSI_QUIRK_DCS_CMD_CONFIG_VC) {
2417 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), enable, 30, 30);
2420 dsi_vc_enable(dsi, channel, 1);
2422 dsi->vc[channel].source = source;
2430 struct dsi_data *dsi = to_dsi_data(dssdev);
2434 WARN_ON(!dsi_bus_is_locked(dsi));
2436 dsi_vc_enable(dsi, channel, 0);
2437 dsi_if_enable(dsi, 0);
2439 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), enable, 9, 9);
2441 dsi_vc_enable(dsi, channel, 1);
2442 dsi_if_enable(dsi, 1);
2444 dsi_force_tx_stop_mode_io(dsi);
2447 if (dsi->vm_timings.ddr_clk_always_on && enable)
2448 dsi_vc_send_null(dsi, channel);
2451 static void dsi_vc_flush_long_data(struct dsi_data *dsi, int channel)
2453 while (REG_GET(dsi, DSI_VC_CTRL(channel), 20, 20)) {
2455 val = dsi_read_reg(dsi, DSI_VC_SHORT_PACKET_HEADER(channel));
2501 static u16 dsi_vc_flush_receive_data(struct dsi_data *dsi, int channel)
2504 while (REG_GET(dsi, DSI_VC_CTRL(channel), 20, 20)) {
2507 val = dsi_read_reg(dsi, DSI_VC_SHORT_PACKET_HEADER(channel));
2522 dsi_vc_flush_long_data(dsi, channel);
2530 static int dsi_vc_send_bta(struct dsi_data *dsi, int channel)
2532 if (dsi->debug_write || dsi->debug_read)
2535 WARN_ON(!dsi_bus_is_locked(dsi));
2538 if (REG_GET(dsi, DSI_VC_CTRL(channel), 20, 20)) {
2540 dsi_vc_flush_receive_data(dsi, channel);
2543 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), 1, 6, 6); /* BTA_EN */
2546 dsi_read_reg(dsi, DSI_VC_CTRL(channel));
2553 struct dsi_data *dsi = to_dsi_data(dssdev);
2558 r = dsi_register_isr_vc(dsi, channel, dsi_completion_handler,
2563 r = dsi_register_isr(dsi, dsi_completion_handler, &completion,
2568 r = dsi_vc_send_bta(dsi, channel);
2579 err = dsi_get_errors(dsi);
2586 dsi_unregister_isr(dsi, dsi_completion_handler, &completion,
2589 dsi_unregister_isr_vc(dsi, channel, dsi_completion_handler,
2595 static inline void dsi_vc_write_long_header(struct dsi_data *dsi, int channel,
2601 WARN_ON(!dsi_bus_is_locked(dsi));
2603 data_id = data_type | dsi->vc[channel].vc_id << 6;
2608 dsi_write_reg(dsi, DSI_VC_LONG_PACKET_HEADER(channel), val);
2611 static inline void dsi_vc_write_long_payload(struct dsi_data *dsi, int channel,
2621 dsi_write_reg(dsi, DSI_VC_LONG_PACKET_PAYLOAD(channel), val);
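The header and payload writers above follow the standard MIPI DSI long-packet layout; a sketch of the field packing implied by the FLD_VAL positions:

	/* header: DataID[7:0] = data_type | vc_id << 6, WordCount[23:8], ECC[31:24] */
	u32 hdr = FLD_VAL(data_id, 7, 0) | FLD_VAL(len, 23, 8) | FLD_VAL(ecc, 31, 24);

	/* payload: bytes enter the FIFO little-endian, first byte in bits 7:0 */
	u32 val = b4 << 24 | b3 << 16 | b2 << 8 | b1 << 0;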
2624 static int dsi_vc_send_long(struct dsi_data *dsi, int channel, u8 data_type,
2633 if (dsi->debug_write)
2637 if (dsi->vc[channel].tx_fifo_size * 32 * 4 < len + 4) {
2642 dsi_vc_config_source(dsi, channel, DSI_VC_SOURCE_L4);
2644 dsi_vc_write_long_header(dsi, channel, data_type, len, ecc);
2648 if (dsi->debug_write)
2656 dsi_vc_write_long_payload(dsi, channel, b1, b2, b3, b4);
2663 if (dsi->debug_write)
2681 dsi_vc_write_long_payload(dsi, channel, b1, b2, b3, 0);
2687 static int dsi_vc_send_short(struct dsi_data *dsi, int channel, u8 data_type,
2693 WARN_ON(!dsi_bus_is_locked(dsi));
2695 if (dsi->debug_write)
2700 dsi_vc_config_source(dsi, channel, DSI_VC_SOURCE_L4);
2702 if (FLD_GET(dsi_read_reg(dsi, DSI_VC_CTRL(channel)), 16, 16)) {
2707 data_id = data_type | dsi->vc[channel].vc_id << 6;
2711 dsi_write_reg(dsi, DSI_VC_SHORT_PACKET_HEADER(channel), r);
2716 static int dsi_vc_send_null(struct dsi_data *dsi, int channel)
2718 return dsi_vc_send_long(dsi, channel, MIPI_DSI_NULL_PACKET, NULL, 0, 0);
2721 static int dsi_vc_write_nosync_common(struct dsi_data *dsi, int channel,
2729 r = dsi_vc_send_short(dsi, channel,
2732 r = dsi_vc_send_short(dsi, channel,
2737 r = dsi_vc_send_short(dsi, channel,
2743 r = dsi_vc_send_long(dsi, channel,
2755 struct dsi_data *dsi = to_dsi_data(dssdev);
2757 return dsi_vc_write_nosync_common(dsi, channel, data, len,
2764 struct dsi_data *dsi = to_dsi_data(dssdev);
2766 return dsi_vc_write_nosync_common(dsi, channel, data, len,
2774 struct dsi_data *dsi = to_dsi_data(dssdev);
2777 r = dsi_vc_write_nosync_common(dsi, channel, data, len, type);
2786 if (REG_GET(dsi, DSI_VC_CTRL(channel), 20, 20)) {
2788 dsi_vc_flush_receive_data(dsi, channel);
2814 static int dsi_vc_dcs_send_read_request(struct dsi_data *dsi, int channel,
2819 if (dsi->debug_read)
2823 r = dsi_vc_send_short(dsi, channel, MIPI_DSI_DCS_READ, dcs_cmd, 0);
2833 static int dsi_vc_generic_send_read_request(struct dsi_data *dsi, int channel,
2840 if (dsi->debug_read)
2858 r = dsi_vc_send_short(dsi, channel, data_type, data, 0);
2868 static int dsi_vc_read_rx_fifo(struct dsi_data *dsi, int channel, u8 *buf,
2876 if (REG_GET(dsi, DSI_VC_CTRL(channel), 20, 20) == 0) {
2882 val = dsi_read_reg(dsi, DSI_VC_SHORT_PACKET_HEADER(channel));
2883 if (dsi->debug_read)
2896 if (dsi->debug_read)
2913 if (dsi->debug_read)
2932 if (dsi->debug_read)
2945 val = dsi_read_reg(dsi,
2947 if (dsi->debug_read)
2979 struct dsi_data *dsi = to_dsi_data(dssdev);
2982 r = dsi_vc_dcs_send_read_request(dsi, channel, dcs_cmd);
2990 r = dsi_vc_read_rx_fifo(dsi, channel, buf, buflen,
3009 struct dsi_data *dsi = to_dsi_data(dssdev);
3012 r = dsi_vc_generic_send_read_request(dsi, channel, reqdata, reqlen);
3020 r = dsi_vc_read_rx_fifo(dsi, channel, buf, buflen,
3036 struct dsi_data *dsi = to_dsi_data(dssdev);
3038 return dsi_vc_send_short(dsi, channel,
3042 static int dsi_enter_ulps(struct dsi_data *dsi)
3050 WARN_ON(!dsi_bus_is_locked(dsi));
3052 WARN_ON(dsi->ulps_enabled);
3054 if (dsi->ulps_enabled)
3058 if (REG_GET(dsi, DSI_CLK_CTRL, 13, 13)) {
3059 dsi_if_enable(dsi, 0);
3060 REG_FLD_MOD(dsi, DSI_CLK_CTRL, 0, 13, 13);
3061 dsi_if_enable(dsi, 1);
3064 dsi_sync_vc(dsi, 0);
3065 dsi_sync_vc(dsi, 1);
3066 dsi_sync_vc(dsi, 2);
3067 dsi_sync_vc(dsi, 3);
3069 dsi_force_tx_stop_mode_io(dsi);
3071 dsi_vc_enable(dsi, 0, false);
3072 dsi_vc_enable(dsi, 1, false);
3073 dsi_vc_enable(dsi, 2, false);
3074 dsi_vc_enable(dsi, 3, false);
3076 if (REG_GET(dsi, DSI_COMPLEXIO_CFG2, 16, 16)) { /* HS_BUSY */
3081 if (REG_GET(dsi, DSI_COMPLEXIO_CFG2, 17, 17)) { /* LP_BUSY */
3086 r = dsi_register_isr_cio(dsi, dsi_completion_handler, &completion,
3093 for (i = 0; i < dsi->num_lanes_supported; ++i) {
3094 if (dsi->lanes[i].function == DSI_LANE_UNUSED)
3100 REG_FLD_MOD(dsi, DSI_COMPLEXIO_CFG2, mask, 9, 5);
3103 dsi_read_reg(dsi, DSI_COMPLEXIO_CFG2);
3112 dsi_unregister_isr_cio(dsi, dsi_completion_handler, &completion,
3116 REG_FLD_MOD(dsi, DSI_COMPLEXIO_CFG2, 0, 9, 5);
3119 dsi_read_reg(dsi, DSI_COMPLEXIO_CFG2);
3121 dsi_cio_power(dsi, DSI_COMPLEXIO_POWER_ULPS);
3123 dsi_if_enable(dsi, false);
3125 dsi->ulps_enabled = true;
3130 dsi_unregister_isr_cio(dsi, dsi_completion_handler, &completion,
3135 static void dsi_set_lp_rx_timeout(struct dsi_data *dsi, unsigned int ticks,
3145 fck = dsi_fclk_rate(dsi);
3147 r = dsi_read_reg(dsi, DSI_TIMING2);
3152 dsi_write_reg(dsi, DSI_TIMING2, r);
3162 static void dsi_set_ta_timeout(struct dsi_data *dsi, unsigned int ticks,
3172 fck = dsi_fclk_rate(dsi);
3174 r = dsi_read_reg(dsi, DSI_TIMING1);
3179 dsi_write_reg(dsi, DSI_TIMING1, r);
3189 static void dsi_set_stop_state_counter(struct dsi_data *dsi, unsigned int ticks,
3199 fck = dsi_fclk_rate(dsi);
3201 r = dsi_read_reg(dsi, DSI_TIMING1);
3206 dsi_write_reg(dsi, DSI_TIMING1, r);
3216 static void dsi_set_hs_tx_timeout(struct dsi_data *dsi, unsigned int ticks,
3226 fck = dsi_get_txbyteclkhs(dsi);
3228 r = dsi_read_reg(dsi, DSI_TIMING2);
3233 dsi_write_reg(dsi, DSI_TIMING2, r);
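All four timeout setters share one shape: a 13-bit tick count plus optional prescaler bits in TIMING1/TIMING2 (x4/x16 for most counters; one may use x8 instead, which is an assumption here). A sketch of the effective-timeout arithmetic:

	unsigned long total_ticks = ticks * (x16 ? 16 : 1) * (x4 ? 4 : 1);
	/* in nanoseconds, relative to the counting clock fck (in Hz): */
	unsigned long timeout_ns = total_ticks * 1000 / (fck / 1000 / 1000);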
3243 static void dsi_config_vp_num_line_buffers(struct dsi_data *dsi)
3247 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) {
3248 int bpp = dsi_get_pixel_size(dsi->pix_fmt);
3249 const struct videomode *vm = &dsi->vm;
3254 if (dsi->line_buffer_size <= vm->hactive * bpp / 8)
3264 REG_FLD_MOD(dsi, DSI_CTRL, num_line_buffers, 13, 12);
3267 static void dsi_config_vp_sync_events(struct dsi_data *dsi)
3272 if (dsi->vm_timings.trans_mode == OMAP_DSS_DSI_PULSE_MODE)
3277 r = dsi_read_reg(dsi, DSI_CTRL);
3285 dsi_write_reg(dsi, DSI_CTRL, r);
3288 static void dsi_config_blanking_modes(struct dsi_data *dsi)
3290 int blanking_mode = dsi->vm_timings.blanking_mode;
3291 int hfp_blanking_mode = dsi->vm_timings.hfp_blanking_mode;
3292 int hbp_blanking_mode = dsi->vm_timings.hbp_blanking_mode;
3293 int hsa_blanking_mode = dsi->vm_timings.hsa_blanking_mode;
3300 r = dsi_read_reg(dsi, DSI_CTRL);
3305 dsi_write_reg(dsi, DSI_CTRL, r);
3370 static void dsi_config_cmd_mode_interleaving(struct dsi_data *dsi)
3378 const struct videomode *vm = &dsi->vm;
3379 int bpp = dsi_get_pixel_size(dsi->pix_fmt);
3380 int ndl = dsi->num_lanes_used - 1;
3381 int dsi_fclk_hsdiv = dsi->user_dsi_cinfo.mX[HSDIV_DSI] + 1;
3388 r = dsi_read_reg(dsi, DSI_CTRL);
3394 r = dsi_read_reg(dsi, DSI_VM_TIMING1);
3399 r = dsi_read_reg(dsi, DSI_CLK_TIMING);
3403 r = dsi_read_reg(dsi, DSI_VM_TIMING7);
3407 r = dsi_read_reg(dsi, DSI_CLK_CTRL);
3411 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG0);
3414 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG1);
3468 r = dsi_read_reg(dsi, DSI_VM_TIMING4);
3472 dsi_write_reg(dsi, DSI_VM_TIMING4, r);
3474 r = dsi_read_reg(dsi, DSI_VM_TIMING5);
3478 dsi_write_reg(dsi, DSI_VM_TIMING5, r);
3480 r = dsi_read_reg(dsi, DSI_VM_TIMING6);
3483 dsi_write_reg(dsi, DSI_VM_TIMING6, r);
3486 static int dsi_proto_config(struct dsi_data *dsi)
3491 dsi_config_tx_fifo(dsi, DSI_FIFO_SIZE_32,
3496 dsi_config_rx_fifo(dsi, DSI_FIFO_SIZE_32,
3502 dsi_set_stop_state_counter(dsi, 0x1000, false, false);
3503 dsi_set_ta_timeout(dsi, 0x1fff, true, true);
3504 dsi_set_lp_rx_timeout(dsi, 0x1fff, true, true);
3505 dsi_set_hs_tx_timeout(dsi, 0x1fff, true, true);
3507 switch (dsi_get_pixel_size(dsi->pix_fmt)) {
3522 r = dsi_read_reg(dsi, DSI_CTRL);
3531 if (!(dsi->data->quirks & DSI_QUIRK_DCS_CMD_CONFIG_VC)) {
3537 dsi_write_reg(dsi, DSI_CTRL, r);
3539 dsi_config_vp_num_line_buffers(dsi);
3541 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) {
3542 dsi_config_vp_sync_events(dsi);
3543 dsi_config_blanking_modes(dsi);
3544 dsi_config_cmd_mode_interleaving(dsi);
3547 dsi_vc_initial_config(dsi, 0);
3548 dsi_vc_initial_config(dsi, 1);
3549 dsi_vc_initial_config(dsi, 2);
3550 dsi_vc_initial_config(dsi, 3);
3555 static void dsi_proto_timings(struct dsi_data *dsi)
3564 int ndl = dsi->num_lanes_used - 1;
3567 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG0);
3574 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG1);
3578 r = dsi_read_reg(dsi, DSI_DSIPHY_CFG2);
3584 tclk_post = ns2ddr(dsi, 60) + 26;
3595 r = dsi_read_reg(dsi, DSI_CLK_TIMING);
3598 dsi_write_reg(dsi, DSI_CLK_TIMING, r);
3612 dsi_write_reg(dsi, DSI_VM_TIMING7, r);
3617 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) {
3619 int hsa = dsi->vm_timings.hsa;
3620 int hfp = dsi->vm_timings.hfp;
3621 int hbp = dsi->vm_timings.hbp;
3622 int vsa = dsi->vm_timings.vsa;
3623 int vfp = dsi->vm_timings.vfp;
3624 int vbp = dsi->vm_timings.vbp;
3625 int window_sync = dsi->vm_timings.window_sync;
3627 const struct videomode *vm = &dsi->vm;
3628 int bpp = dsi_get_pixel_size(dsi->pix_fmt);
3631 hsync_end = dsi->vm_timings.trans_mode == OMAP_DSS_DSI_PULSE_MODE;
3646 r = dsi_read_reg(dsi, DSI_VM_TIMING1);
3650 dsi_write_reg(dsi, DSI_VM_TIMING1, r);
3652 r = dsi_read_reg(dsi, DSI_VM_TIMING2);
3657 dsi_write_reg(dsi, DSI_VM_TIMING2, r);
3659 r = dsi_read_reg(dsi, DSI_VM_TIMING3);
3662 dsi_write_reg(dsi, DSI_VM_TIMING3, r);
3669 struct dsi_data *dsi = to_dsi_data(dssdev);
3687 if (num_pins < 4 || num_pins > dsi->num_lanes_supported * 2
3703 if (dx < 0 || dx >= dsi->num_lanes_supported * 2)
3706 if (dy < 0 || dy >= dsi->num_lanes_supported * 2)
3726 memcpy(dsi->lanes, lanes, sizeof(dsi->lanes));
3727 dsi->num_lanes_used = num_lanes;
3734 struct dsi_data *dsi = to_dsi_data(dssdev);
3735 int bpp = dsi_get_pixel_size(dsi->pix_fmt);
3740 r = dsi_display_init_dispc(dsi);
3744 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) {
3745 switch (dsi->pix_fmt) {
3763 dsi_if_enable(dsi, false);
3764 dsi_vc_enable(dsi, channel, false);
3767 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), 1, 4, 4);
3769 word_count = DIV_ROUND_UP(dsi->vm.hactive * bpp, 8);
3771 dsi_vc_write_long_header(dsi, channel, data_type,
3774 dsi_vc_enable(dsi, channel, true);
3775 dsi_if_enable(dsi, true);
3778 r = dss_mgr_enable(&dsi->output);
3785 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) {
3786 dsi_if_enable(dsi, false);
3787 dsi_vc_enable(dsi, channel, false);
3790 dsi_display_uninit_dispc(dsi);
3796 struct dsi_data *dsi = to_dsi_data(dssdev);
3798 if (dsi->mode == OMAP_DSS_DSI_VIDEO_MODE) {
3799 dsi_if_enable(dsi, false);
3800 dsi_vc_enable(dsi, channel, false);
3803 REG_FLD_MOD(dsi, DSI_VC_CTRL(channel), 0, 4, 4);
3805 dsi_vc_enable(dsi, channel, true);
3806 dsi_if_enable(dsi, true);
3809 dss_mgr_disable(&dsi->output);
3811 dsi_display_uninit_dispc(dsi);
3814 static void dsi_update_screen_dispc(struct dsi_data *dsi)
3824 const unsigned channel = dsi->update_channel;
3825 const unsigned int line_buf_size = dsi->line_buffer_size;
3826 u16 w = dsi->vm.hactive;
3827 u16 h = dsi->vm.vactive;
3831 dsi_vc_config_source(dsi, channel, DSI_VC_SOURCE_VP);
3833 bytespp = dsi_get_pixel_size(dsi->pix_fmt) / 8;
3852 dsi_write_reg(dsi, DSI_VC_TE(channel), l);
3854 dsi_vc_write_long_header(dsi, channel, MIPI_DSI_DCS_LONG_WRITE,
3857 if (dsi->te_enabled)
3861 dsi_write_reg(dsi, DSI_VC_TE(channel), l);
3869 dispc_disable_sidle(dsi->dss->dispc);
3871 dsi_perf_mark_start(dsi);
3873 r = schedule_delayed_work(&dsi->framedone_timeout_work,
3877 dss_mgr_start_update(&dsi->output);
3879 if (dsi->te_enabled) {
3882 REG_FLD_MOD(dsi, DSI_TIMING2, 0, 15, 15); /* LP_RX_TO */
3884 dsi_vc_send_bta(dsi, channel);
3887 mod_timer(&dsi->te_timer, jiffies + msecs_to_jiffies(250));
3899 static void dsi_handle_framedone(struct dsi_data *dsi, int error)
3902 dispc_enable_sidle(dsi->dss->dispc);
3904 if (dsi->te_enabled) {
3906 REG_FLD_MOD(dsi, DSI_TIMING2, 1, 15, 15); /* LP_RX_TO */
3909 dsi->framedone_callback(error, dsi->framedone_data);
3912 dsi_perf_show(dsi, "DISPC");
3917 struct dsi_data *dsi = container_of(work, struct dsi_data,
3928 dsi_handle_framedone(dsi, -ETIMEDOUT);
3933 struct dsi_data *dsi = data;
3940 cancel_delayed_work(&dsi->framedone_timeout_work);
3942 dsi_handle_framedone(dsi, 0);
3948 struct dsi_data *dsi = to_dsi_data(dssdev);
3951 dsi_perf_mark_setup(dsi);
3953 dsi->update_channel = channel;
3955 dsi->framedone_callback = callback;
3956 dsi->framedone_data = data;
3958 dw = dsi->vm.hactive;
3959 dh = dsi->vm.vactive;
3962 dsi->update_bytes = dw * dh *
3963 dsi_get_pixel_size(dsi->pix_fmt) / 8;
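For scale, the per-frame byte count computed above, with illustrative numbers:

	/* e.g. an 864 x 480 panel at 24 bpp:
	 * update_bytes = 864 * 480 * 24 / 8 = 1,244,160 bytes per frame */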
3965 dsi_update_screen_dispc(dsi);
3972 static int dsi_configure_dispc_clocks(struct dsi_data *dsi)
3978 fck = dsi_get_pll_hsdiv_dispc_rate(dsi);
3980 dispc_cinfo.lck_div = dsi->user_dispc_cinfo.lck_div;
3981 dispc_cinfo.pck_div = dsi->user_dispc_cinfo.pck_div;
3983 r = dispc_calc_clock_rates(dsi->dss->dispc, fck, &dispc_cinfo);
3989 dsi->mgr_config.clock_info = dispc_cinfo;
3994 static int dsi_display_init_dispc(struct dsi_data *dsi)
3996 enum omap_channel channel = dsi->output.dispc_channel;
3999 dss_select_lcd_clk_source(dsi->dss, channel, dsi->module_id == 0 ?
4003 if (dsi->mode == OMAP_DSS_DSI_CMD_MODE) {
4004 r = dss_mgr_register_framedone_handler(&dsi->output,
4005 dsi_framedone_irq_callback, dsi);
4011 dsi->mgr_config.stallmode = true;
4012 dsi->mgr_config.fifohandcheck = true;
4014 dsi->mgr_config.stallmode = false;
4015 dsi->mgr_config.fifohandcheck = false;
4018 r = dsi_configure_dispc_clocks(dsi);
4022 dsi->mgr_config.io_pad_mode = DSS_IO_PAD_MODE_BYPASS;
4023 dsi->mgr_config.video_port_width =
4024 dsi_get_pixel_size(dsi->pix_fmt);
4025 dsi->mgr_config.lcden_sig_polarity = 0;
4027 dss_mgr_set_lcd_config(&dsi->output, &dsi->mgr_config);
4031 if (dsi->mode == OMAP_DSS_DSI_CMD_MODE)
4032 dss_mgr_unregister_framedone_handler(&dsi->output,
4033 dsi_framedone_irq_callback, dsi);
4035 dss_select_lcd_clk_source(dsi->dss, channel, DSS_CLK_SRC_FCK);
4039 static void dsi_display_uninit_dispc(struct dsi_data *dsi)
4041 enum omap_channel channel = dsi->output.dispc_channel;
4043 if (dsi->mode == OMAP_DSS_DSI_CMD_MODE)
4044 dss_mgr_unregister_framedone_handler(&dsi->output,
4045 dsi_framedone_irq_callback, dsi);
4047 dss_select_lcd_clk_source(dsi->dss, channel, DSS_CLK_SRC_FCK);
4050 static int dsi_configure_dsi_clocks(struct dsi_data *dsi)
4055 cinfo = dsi->user_dsi_cinfo;
4057 r = dss_pll_set_config(&dsi->pll, &cinfo);
4059 DSSERR("Failed to set dsi clocks\n");
4066 static int dsi_display_init_dsi(struct dsi_data *dsi)
4070 r = dss_pll_enable(&dsi->pll);
4074 r = dsi_configure_dsi_clocks(dsi);
4078 dss_select_dsi_clk_source(dsi->dss, dsi->module_id,
4079 dsi->module_id == 0 ?
4084 if (!dsi->vdds_dsi_enabled) {
4085 r = regulator_enable(dsi->vdds_dsi_reg);
4089 dsi->vdds_dsi_enabled = true;
4092 r = dsi_cio_init(dsi);
4096 _dsi_print_reset_status(dsi);
4098 dsi_proto_timings(dsi);
4099 dsi_set_lp_clk_divisor(dsi);
4102 _dsi_print_reset_status(dsi);
4104 r = dsi_proto_config(dsi);
4109 dsi_vc_enable(dsi, 0, 1);
4110 dsi_vc_enable(dsi, 1, 1);
4111 dsi_vc_enable(dsi, 2, 1);
4112 dsi_vc_enable(dsi, 3, 1);
4113 dsi_if_enable(dsi, 1);
4114 dsi_force_tx_stop_mode_io(dsi);
4118 dsi_cio_uninit(dsi);
4120 regulator_disable(dsi->vdds_dsi_reg);
4121 dsi->vdds_dsi_enabled = false;
4123 dss_select_dsi_clk_source(dsi->dss, dsi->module_id, DSS_CLK_SRC_FCK);
4125 dss_pll_disable(&dsi->pll);
4130 static void dsi_display_uninit_dsi(struct dsi_data *dsi, bool disconnect_lanes,
4133 if (enter_ulps && !dsi->ulps_enabled)
4134 dsi_enter_ulps(dsi);
4137 dsi_if_enable(dsi, 0);
4138 dsi_vc_enable(dsi, 0, 0);
4139 dsi_vc_enable(dsi, 1, 0);
4140 dsi_vc_enable(dsi, 2, 0);
4141 dsi_vc_enable(dsi, 3, 0);
4143 dss_select_dsi_clk_source(dsi->dss, dsi->module_id, DSS_CLK_SRC_FCK);
4144 dsi_cio_uninit(dsi);
4145 dss_pll_disable(&dsi->pll);
4148 regulator_disable(dsi->vdds_dsi_reg);
4149 dsi->vdds_dsi_enabled = false;
4155 struct dsi_data *dsi = to_dsi_data(dssdev);
4160 WARN_ON(!dsi_bus_is_locked(dsi));
4162 mutex_lock(&dsi->lock);
4164 r = dsi_runtime_get(dsi);
4168 _dsi_initialize_irq(dsi);
4170 r = dsi_display_init_dsi(dsi);
4174 mutex_unlock(&dsi->lock);
4179 dsi_runtime_put(dsi);
4181 mutex_unlock(&dsi->lock);
4188 struct dsi_data *dsi = to_dsi_data(dssdev);
4192 WARN_ON(!dsi_bus_is_locked(dsi));
4194 mutex_lock(&dsi->lock);
4196 dsi_sync_vc(dsi, 0);
4197 dsi_sync_vc(dsi, 1);
4198 dsi_sync_vc(dsi, 2);
4199 dsi_sync_vc(dsi, 3);
4201 dsi_display_uninit_dsi(dsi, disconnect_lanes, enter_ulps);
4203 dsi_runtime_put(dsi);
4205 mutex_unlock(&dsi->lock);
4210 struct dsi_data *dsi = to_dsi_data(dssdev);
4212 dsi->te_enabled = enable;
4331 return dispc_div_calc(ctx->dsi->dss->dispc, dispc,
4340 struct dsi_data *dsi = ctx->dsi;
4348 dsi->data->max_fck_freq,
4352 static bool dsi_cm_calc(struct dsi_data *dsi,
4361 clkin = clk_get_rate(dsi->pll.clkin);
4363 ndl = dsi->num_lanes_used - 1;
4376 ctx->dsi = dsi;
4377 ctx->pll = &dsi->pll;
4393 struct dsi_data *dsi = ctx->dsi;
4396 int ndl = dsi->num_lanes_used - 1;
4433 if (dsi->line_buffer_size < xres * bitspp / 8) {
4605 print_dsi_vm("dsi ", &ctx->dsi_vm);
4632 return dispc_div_calc(ctx->dsi->dss->dispc, dispc,
4641 struct dsi_data *dsi = ctx->dsi;
4649 dsi->data->max_fck_freq,
4653 static bool dsi_vm_calc(struct dsi_data *dsi,
4661 int ndl = dsi->num_lanes_used - 1;
4665 clkin = clk_get_rate(dsi->pll.clkin);
4668 ctx->dsi = dsi;
4669 ctx->pll = &dsi->pll;
4698 struct dsi_data *dsi = to_dsi_data(dssdev);
4703 mutex_lock(&dsi->lock);
4705 dsi->pix_fmt = config->pixel_format;
4706 dsi->mode = config->mode;
4709 ok = dsi_vm_calc(dsi, config, &ctx);
4711 ok = dsi_cm_calc(dsi, config, &ctx);
4719 dsi_pll_calc_dsi_fck(dsi, &ctx.dsi_cinfo);
4722 config->lp_clk_min, config->lp_clk_max, &dsi->user_lp_cinfo);
4728 dsi->user_dsi_cinfo = ctx.dsi_cinfo;
4729 dsi->user_dispc_cinfo = ctx.dispc_cinfo;
4731 dsi->vm = ctx.vm;
4737 dsi->vm.flags &= ~DISPLAY_FLAGS_INTERLACED;
4738 dsi->vm.flags &= ~DISPLAY_FLAGS_HSYNC_LOW;
4739 dsi->vm.flags |= DISPLAY_FLAGS_HSYNC_HIGH;
4740 dsi->vm.flags &= ~DISPLAY_FLAGS_VSYNC_LOW;
4741 dsi->vm.flags |= DISPLAY_FLAGS_VSYNC_HIGH;
4747 dsi->vm.flags &= ~DISPLAY_FLAGS_PIXDATA_NEGEDGE;
4748 dsi->vm.flags |= DISPLAY_FLAGS_PIXDATA_POSEDGE;
4749 dsi->vm.flags &= ~DISPLAY_FLAGS_DE_LOW;
4750 dsi->vm.flags |= DISPLAY_FLAGS_DE_HIGH;
4751 dsi->vm.flags &= ~DISPLAY_FLAGS_SYNC_POSEDGE;
4752 dsi->vm.flags |= DISPLAY_FLAGS_SYNC_NEGEDGE;
4754 dss_mgr_set_timings(&dsi->output, &dsi->vm);
4756 dsi->vm_timings = ctx.dsi_vm;
4758 mutex_unlock(&dsi->lock);
4762 mutex_unlock(&dsi->lock);
4773 static enum omap_channel dsi_get_channel(struct dsi_data *dsi)
4775 switch (dsi->data->model) {
4780 switch (dsi->module_id) {
4791 switch (dsi->module_id) {
4809 struct dsi_data *dsi = to_dsi_data(dssdev);
4812 for (i = 0; i < ARRAY_SIZE(dsi->vc); i++) {
4813 if (!dsi->vc[i].dssdev) {
4814 dsi->vc[i].dssdev = dssdev;
4826 struct dsi_data *dsi = to_dsi_data(dssdev);
4838 if (dsi->vc[channel].dssdev != dssdev) {
4844 dsi->vc[channel].vc_id = vc_id;
4851 struct dsi_data *dsi = to_dsi_data(dssdev);
4854 dsi->vc[channel].dssdev == dssdev) {
4855 dsi->vc[channel].dssdev = NULL;
4856 dsi->vc[channel].vc_id = 0;
4861 static int dsi_get_clocks(struct dsi_data *dsi)
4865 clk = devm_clk_get(dsi->dev, "fck");
4871 dsi->dss_clk = clk;
4893 .dsi = {
5020 static int dsi_init_pll_data(struct dss_device *dss, struct dsi_data *dsi)
5022 struct dss_pll *pll = &dsi->pll;
5026 clk = devm_clk_get(dsi->dev, "sys_clk");
5032 pll->name = dsi->module_id == 0 ? "dsi0" : "dsi1";
5033 pll->id = dsi->module_id == 0 ? DSS_PLL_DSI1 : DSS_PLL_DSI2;
5035 pll->base = dsi->pll_base;
5036 pll->hw = dsi->data->pll_hw;
5053 struct dsi_data *dsi = dev_get_drvdata(dev);
5058 dsi->dss = dss;
5060 dsi_init_pll_data(dss, dsi);
5062 r = dsi_runtime_get(dsi);
5066 rev = dsi_read_reg(dsi, DSI_REVISION);
5070 dsi->line_buffer_size = dsi_get_line_buf_size(dsi);
5072 dsi_runtime_put(dsi);
5074 snprintf(name, sizeof(name), "dsi%u_regs", dsi->module_id + 1);
5075 dsi->debugfs.regs = dss_debugfs_create_file(dss, name,
5076 dsi_dump_dsi_regs, dsi);
5078 snprintf(name, sizeof(name), "dsi%u_irqs", dsi->module_id + 1);
5079 dsi->debugfs.irqs = dss_debugfs_create_file(dss, name,
5080 dsi_dump_dsi_irqs, dsi);
5082 snprintf(name, sizeof(name), "dsi%u_clks", dsi->module_id + 1);
5083 dsi->debugfs.clks = dss_debugfs_create_file(dss, name,
5084 dsi_dump_dsi_clocks, dsi);
5091 struct dsi_data *dsi = dev_get_drvdata(dev);
5093 dss_debugfs_remove_file(dsi->debugfs.clks);
5094 dss_debugfs_remove_file(dsi->debugfs.irqs);
5095 dss_debugfs_remove_file(dsi->debugfs.regs);
5097 WARN_ON(dsi->scp_clk_refcount > 0);
5099 dss_pll_unregister(&dsi->pll);
5111 static int dsi_init_output(struct dsi_data *dsi)
5113 struct omap_dss_device *out = &dsi->output;
5116 out->dev = dsi->dev;
5117 out->id = dsi->module_id == 0 ?
5121 out->name = dsi->module_id == 0 ? "dsi.0" : "dsi.1";
5122 out->dispc_channel = dsi_get_channel(dsi);
5139 static void dsi_uninit_output(struct dsi_data *dsi)
5141 struct omap_dss_device *out = &dsi->output;
5147 static int dsi_probe_of(struct dsi_data *dsi)
5149 struct device_node *node = dsi->dev->of_node;
5163 dev_err(dsi->dev, "failed to find lane data\n");
5171 num_pins > dsi->num_lanes_supported * 2) {
5172 dev_err(dsi->dev, "bad number of lanes\n");
5179 dev_err(dsi->dev, "failed to read lane data\n");
5187 r = dsi_configure_pins(&dsi->output, &pin_cfg);
5189 dev_err(dsi->dev, "failed to configure pins");
5255 { .compatible = "ti,omap3-dsi", .data = &dsi_of_data_omap36xx, },
5256 { .compatible = "ti,omap4-dsi", .data = &dsi_of_data_omap4, },
5257 { .compatible = "ti,omap5-dsi", .data = &dsi_of_data_omap5, },
5272 struct dsi_data *dsi;
5278 dsi = devm_kzalloc(dev, sizeof(*dsi), GFP_KERNEL);
5279 if (!dsi)
5282 dsi->dev = dev;
5283 dev_set_drvdata(dev, dsi);
5285 spin_lock_init(&dsi->irq_lock);
5286 spin_lock_init(&dsi->errors_lock);
5287 dsi->errors = 0;
5290 spin_lock_init(&dsi->irq_stats_lock);
5291 dsi->irq_stats.last_reset = jiffies;
5294 mutex_init(&dsi->lock);
5295 sema_init(&dsi->bus_lock, 1);
5297 INIT_DEFERRABLE_WORK(&dsi->framedone_timeout_work,
5301 timer_setup(&dsi->te_timer, dsi_te_timeout, 0);
5305 dsi->proto_base = devm_ioremap_resource(dev, dsi_mem);
5306 if (IS_ERR(dsi->proto_base))
5307 return PTR_ERR(dsi->proto_base);
5310 dsi->phy_base = devm_ioremap_resource(dev, res);
5311 if (IS_ERR(dsi->phy_base))
5312 return PTR_ERR(dsi->phy_base);
5315 dsi->pll_base = devm_ioremap_resource(dev, res);
5316 if (IS_ERR(dsi->pll_base))
5317 return PTR_ERR(dsi->pll_base);
5319 dsi->irq = platform_get_irq(pdev, 0);
5320 if (dsi->irq < 0) {
5325 r = devm_request_irq(dev, dsi->irq, omap_dsi_irq_handler,
5326 IRQF_SHARED, dev_name(dev), dsi);
5332 dsi->vdds_dsi_reg = devm_regulator_get(dev, "vdd");
5333 if (IS_ERR(dsi->vdds_dsi_reg)) {
5334 if (PTR_ERR(dsi->vdds_dsi_reg) != -EPROBE_DEFER)
5336 return PTR_ERR(dsi->vdds_dsi_reg);
5341 dsi->data = soc->data;
5343 dsi->data = of_match_node(dsi_of_match, dev->of_node)->data;
5345 d = dsi->data->modules;
5354 dsi->module_id = d->id;
5356 if (dsi->data->model == DSI_MODEL_OMAP4 ||
5357 dsi->data->model == DSI_MODEL_OMAP5) {
5365 dsi->data->model == DSI_MODEL_OMAP4 ?
5370 dsi->syscon = syscon_node_to_regmap(np);
5375 for (i = 0; i < ARRAY_SIZE(dsi->vc); i++) {
5376 dsi->vc[i].source = DSI_VC_SOURCE_L4;
5377 dsi->vc[i].dssdev = NULL;
5378 dsi->vc[i].vc_id = 0;
5381 r = dsi_get_clocks(dsi);
5389 if (dsi->data->quirks & DSI_QUIRK_GNQ) {
5390 dsi_runtime_get(dsi);
5392 dsi->num_lanes_supported = 1 + REG_GET(dsi, DSI_GNQ, 11, 9);
5393 dsi_runtime_put(dsi);
5395 dsi->num_lanes_supported = 3;
5404 r = dsi_init_output(dsi);
5408 r = dsi_probe_of(dsi);
5421 dsi_uninit_output(dsi);
5431 struct dsi_data *dsi = platform_get_drvdata(pdev);
5435 dsi_uninit_output(dsi);
5441 if (dsi->vdds_dsi_reg != NULL && dsi->vdds_dsi_enabled) {
5442 regulator_disable(dsi->vdds_dsi_reg);
5443 dsi->vdds_dsi_enabled = false;
5451 struct dsi_data *dsi = dev_get_drvdata(dev);
5453 dsi->is_enabled = false;
5457 synchronize_irq(dsi->irq);
5464 struct dsi_data *dsi = dev_get_drvdata(dev);
5466 dsi->is_enabled = true;