Lines matching refs: dd
49 const struct dpll_data *dd;
52 dd = clk->dpll_data;
54 v = ti_clk_ll_ops->clk_readl(&dd->control_reg);
55 v &= ~dd->enable_mask;
56 v |= clken_bits << __ffs(dd->enable_mask);
57 ti_clk_ll_ops->clk_writel(v, &dd->control_reg);
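The hits at lines 49-57 look like the DPLL mode-write helper from the TI OMAP DPLL driver (drivers/clk/ti/dpll3xxx.c in recent kernels): a read-modify-write of the enable field in the control register, with __ffs(mask) supplying the field's bit offset. A minimal standalone sketch of the same pattern, assuming __builtin_ctz as a stand-in for the kernel's __ffs() and a plain variable in place of the memory-mapped register:

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the kernel's __ffs(): index of the lowest set bit.
 * mask must be nonzero. */
static unsigned int field_shift(uint32_t mask)
{
        return (unsigned int)__builtin_ctz(mask);
}

/* Read-modify-write of the field selected by mask; cf. lines 54-57. */
static uint32_t write_field(uint32_t reg, uint32_t mask, uint32_t val)
{
        reg &= ~mask;                      /* clear the old field */
        reg |= val << field_shift(mask);   /* shift new value into place */
        return reg;
}

int main(void)
{
        /* Hypothetical enable_mask covering bits [2:0]. */
        uint32_t control = 0xdeadbee8, enable_mask = 0x7;

        /* 0x7 is the "locked" EN-field value on OMAP3-era DPLLs. */
        control = write_field(control, enable_mask, 0x7);
        printf("control = 0x%08x\n", (unsigned)control);   /* 0xdeadbeef */
        return 0;
}

The same mask/__ffs idiom recurs throughout the rest of the listing.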
63 const struct dpll_data *dd;
68 dd = clk->dpll_data;
71 state <<= __ffs(dd->idlest_mask);
73 while (((ti_clk_ll_ops->clk_readl(&dd->idlest_reg) & dd->idlest_mask)
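Lines 63-73 poll the idlest register until the DPLL reports the requested state; the kernel loop bounds the wait by pairing each read with a delay and a retry limit. The one-shot check at lines 140-151 reads the same field once instead of looping. A bounded-poll sketch against a fake register, with an illustrative retry limit rather than the driver's:

#include <stdint.h>
#include <stdio.h>

#define MAX_WAIT_TRIES 1000000   /* illustrative bound, not the kernel's */

/* Hypothetical register model: pretend the DPLL locks after 3 reads. */
static uint32_t read_idlest(void)
{
        static int reads;
        return (++reads > 3) ? 0x1 : 0x0;   /* bit 0 = "locked" here */
}

/* Wait until (idlest & mask) == state, or give up; cf. lines 71-73. */
static int wait_for_state(uint32_t mask, uint32_t state)
{
        int i = 0;

        state <<= __builtin_ctz(mask);      /* align state with the field */
        while ((read_idlest() & mask) != state) {
                if (++i > MAX_WAIT_TRIES)
                        return -1;          /* timed out */
        }
        return 0;
}

int main(void)
{
        printf("wait: %s\n", wait_for_state(0x1, 1) ? "timeout" : "locked");
        return 0;
}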
140 const struct dpll_data *dd;
147 dd = clk->dpll_data;
148 state <<= __ffs(dd->idlest_mask);
151 if ((ti_clk_ll_ops->clk_readl(&dd->idlest_reg) & dd->idlest_mask) ==
304 struct dpll_data *dd = clk->dpll_data;
317 v = ti_clk_ll_ops->clk_readl(&dd->control_reg);
318 v &= ~dd->freqsel_mask;
319 v |= freqsel << __ffs(dd->freqsel_mask);
320 ti_clk_ll_ops->clk_writel(v, &dd->control_reg);
324 v = ti_clk_ll_ops->clk_readl(&dd->mult_div1_reg);
327 if (dd->dcc_mask) {
328 if (dd->last_rounded_rate >= dd->dcc_rate)
329 v |= dd->dcc_mask; /* Enable DCC */
331 v &= ~dd->dcc_mask; /* Disable DCC */
334 v &= ~(dd->mult_mask | dd->div1_mask);
335 v |= dd->last_rounded_m << __ffs(dd->mult_mask);
336 v |= (dd->last_rounded_n - 1) << __ffs(dd->div1_mask);
339 if (dd->dco_mask) {
340 _lookup_dco(clk, &dco, dd->last_rounded_m, dd->last_rounded_n);
341 v &= ~(dd->dco_mask);
342 v |= dco << __ffs(dd->dco_mask);
344 if (dd->sddiv_mask) {
345 _lookup_sddiv(clk, &sd_div, dd->last_rounded_m,
346 dd->last_rounded_n);
347 v &= ~(dd->sddiv_mask);
348 v |= sd_div << __ffs(dd->sddiv_mask);
370 ti_clk_ll_ops->clk_writel(v, &dd->mult_div1_reg);
373 if (dd->m4xen_mask || dd->lpmode_mask) {
374 v = ti_clk_ll_ops->clk_readl(&dd->control_reg);
376 if (dd->m4xen_mask) {
377 if (dd->last_rounded_m4xen)
378 v |= dd->m4xen_mask;
380 v &= ~dd->m4xen_mask;
383 if (dd->lpmode_mask) {
384 if (dd->last_rounded_lpmode)
385 v |= dd->lpmode_mask;
387 v &= ~dd->lpmode_mask;
390 ti_clk_ll_ops->clk_writel(v, &dd->control_reg);
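Lines 304-390 are the rate-programming sequence: optionally write freqsel (lines 317-320, computed at line 584 in the set-rate path), then pack the multiplier M and the divider into mult_div1_reg along with the optional DCC, DCO, and sigma-delta fields, and finally update the m4xen/lpmode bits in the control register. Note line 336: the hardware stores the divider as N - 1, which the save/restore code further down undoes with a matching + 1. A sketch of the M/N packing, with illustrative field masks (not taken from any OMAP TRM):

#include <stdint.h>
#include <stdio.h>

/* Illustrative layout: multiplier M in bits [18:8], N-1 in bits [6:0]. */
#define MULT_MASK  (0x7ffu << 8)
#define DIV1_MASK  (0x7fu << 0)

static uint32_t program_mn(uint32_t reg, uint32_t m, uint32_t n)
{
        reg &= ~(MULT_MASK | DIV1_MASK);            /* cf. line 334 */
        reg |= m << __builtin_ctz(MULT_MASK);       /* cf. line 335 */
        reg |= (n - 1) << __builtin_ctz(DIV1_MASK); /* cf. line 336: N - 1 */
        return reg;
}

int main(void)
{
        /* M = 500, N = 12 would give Fdpll = Fref * 500 / 12. */
        printf("mult_div1 = 0x%08x\n", (unsigned)program_mn(0, 500, 12));
        return 0;
}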
440 struct dpll_data *dd;
443 dd = clk->dpll_data;
444 if (!dd)
460 if (clk_hw_get_rate(hw) == clk_hw_get_rate(dd->clk_bypass)) {
461 WARN_ON(parent != dd->clk_bypass);
464 WARN_ON(parent != dd->clk_ref);
503 struct dpll_data *dd;
508 dd = clk->dpll_data;
509 if (!dd)
512 if (clk_hw_get_rate(dd->clk_bypass) == req->rate &&
513 (dd->modes & (1 << DPLL_LOW_POWER_BYPASS))) {
514 req->best_parent_hw = dd->clk_bypass;
518 req->best_parent_hw = dd->clk_ref;
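Lines 440-464 and 503-518 decide between the two parents: if the current or requested rate equals the bypass clock's rate and the DPLL supports low-power bypass, the parent should be clk_bypass; otherwise clk_ref, with the DPLL locked. A condensed sketch of the lines 512-518 decision; the DPLL_LOW_POWER_BYPASS value here follows the OMAP EN-field encoding but should be treated as an assumption:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Assumed EN-field value, used as a bit index into the modes bitmask. */
#define DPLL_LOW_POWER_BYPASS 0x5

/* True if the DPLL should run in bypass for this request;
 * cf. lines 512-518. */
static bool want_bypass(uint64_t req_rate, uint64_t bypass_rate,
                        uint32_t modes)
{
        return req_rate == bypass_rate &&
               (modes & (1u << DPLL_LOW_POWER_BYPASS));
}

int main(void)
{
        uint32_t modes = 1u << DPLL_LOW_POWER_BYPASS;

        printf("bypass? %d\n", want_bypass(19200000, 19200000, modes));
        printf("bypass? %d\n", want_bypass(400000000, 19200000, modes));
        return 0;
}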
565 struct dpll_data *dd;
572 dd = clk->dpll_data;
573 if (!dd)
576 if (clk_hw_get_parent(hw) != dd->clk_ref)
579 if (dd->last_rounded_rate == 0)
584 freqsel = _omap3_dpll_compute_freqsel(clk, dd->last_rounded_n);
644 const struct dpll_data *dd;
650 dd = clk->dpll_data;
652 if (!dd->autoidle_mask)
655 v = ti_clk_ll_ops->clk_readl(&dd->autoidle_reg);
656 v &= dd->autoidle_mask;
657 v >>= __ffs(dd->autoidle_mask);
673 const struct dpll_data *dd;
679 dd = clk->dpll_data;
681 if (!dd->autoidle_mask)
689 v = ti_clk_ll_ops->clk_readl(&dd->autoidle_reg);
690 v &= ~dd->autoidle_mask;
691 v |= DPLL_AUTOIDLE_LOW_POWER_STOP << __ffs(dd->autoidle_mask);
692 ti_clk_ll_ops->clk_writel(v, &dd->autoidle_reg);
703 const struct dpll_data *dd;
709 dd = clk->dpll_data;
711 if (!dd->autoidle_mask)
714 v = ti_clk_ll_ops->clk_readl(&dd->autoidle_reg);
715 v &= ~dd->autoidle_mask;
716 v |= DPLL_AUTOIDLE_DISABLE << __ffs(dd->autoidle_mask);
717 ti_clk_ll_ops->clk_writel(v, &dd->autoidle_reg);
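Lines 644-717 are the autoidle trio: read back the current autoidle field, program low-power-stop to allow autoidle, or write DPLL_AUTOIDLE_DISABLE to deny it. All three are the same mask/__ffs pattern as above. A compact round-trip, with the two mode encodings assumed from the listing's constant names:

#include <stdint.h>
#include <stdio.h>

/* Assumed encodings; in the driver these are named constants. */
#define DPLL_AUTOIDLE_DISABLE         0x0
#define DPLL_AUTOIDLE_LOW_POWER_STOP  0x1

static uint32_t set_autoidle(uint32_t reg, uint32_t mask, uint32_t mode)
{
        reg &= ~mask;                          /* cf. lines 690, 715 */
        reg |= mode << __builtin_ctz(mask);
        return reg;
}

static uint32_t get_autoidle(uint32_t reg, uint32_t mask)
{
        return (reg & mask) >> __builtin_ctz(mask);   /* cf. lines 656-657 */
}

int main(void)
{
        uint32_t reg = 0, mask = 0x7;   /* hypothetical 3-bit field */

        reg = set_autoidle(reg, mask, DPLL_AUTOIDLE_LOW_POWER_STOP);
        printf("autoidle = %u\n", (unsigned)get_autoidle(reg, mask)); /* 1 */
        reg = set_autoidle(reg, mask, DPLL_AUTOIDLE_DISABLE);
        printf("autoidle = %u\n", (unsigned)get_autoidle(reg, mask)); /* 0 */
        return 0;
}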
756 const struct dpll_data *dd;
769 dd = pclk->dpll_data;
771 WARN_ON(!dd->enable_mask);
773 v = ti_clk_ll_ops->clk_readl(&dd->control_reg) & dd->enable_mask;
774 v >>= __ffs(dd->enable_mask);
775 if ((v != OMAP3XXX_EN_DPLL_LOCKED) || (dd->flags & DPLL_J_TYPE))
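Lines 756-775 guard the clkoutx2 rate calculation: the doubled output only applies while the DPLL is actually locked and is not a J-type DPLL; otherwise the x2 clock simply follows its parent. A sketch, taking the "locked" EN-field value 0x7 from the constant name on line 775:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define EN_DPLL_LOCKED 0x7   /* cf. OMAP3XXX_EN_DPLL_LOCKED on line 775 */

/* clkoutx2 doubles the rate only while the parent DPLL is locked and
 * is not a J-type DPLL; cf. lines 773-775. */
static uint64_t clkoutx2_rate(uint64_t parent_rate, uint32_t en_field,
                              bool j_type)
{
        if (en_field != EN_DPLL_LOCKED || j_type)
                return parent_rate;
        return parent_rate * 2;
}

int main(void)
{
        printf("%llu\n", (unsigned long long)clkoutx2_rate(96000000, 0x7, false));
        printf("%llu\n", (unsigned long long)clkoutx2_rate(96000000, 0x5, false));
        return 0;
}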
792 struct dpll_data *dd;
795 dd = clk->dpll_data;
797 v = ti_clk_ll_ops->clk_readl(&dd->control_reg);
798 clk->context = (v & dd->enable_mask) >> __ffs(dd->enable_mask);
801 v = ti_clk_ll_ops->clk_readl(&dd->mult_div1_reg);
802 dd->last_rounded_m = (v & dd->mult_mask) >>
803 __ffs(dd->mult_mask);
804 dd->last_rounded_n = ((v & dd->div1_mask) >>
805 __ffs(dd->div1_mask)) + 1;
821 const struct dpll_data *dd;
824 dd = clk->dpll_data;
830 v = ti_clk_ll_ops->clk_readl(&dd->mult_div1_reg);
831 v &= ~(dd->mult_mask | dd->div1_mask);
832 v |= dd->last_rounded_m << __ffs(dd->mult_mask);
833 v |= (dd->last_rounded_n - 1) << __ffs(dd->div1_mask);
834 ti_clk_ll_ops->clk_writel(v, &dd->mult_div1_reg);
853 struct dpll_data *dd;
856 dd = clk->dpll_data;
858 v = ti_clk_ll_ops->clk_readl(&dd->control_reg);
859 clk->context = (v & dd->enable_mask) >> __ffs(dd->enable_mask);
862 v = ti_clk_ll_ops->clk_readl(&dd->mult_div1_reg);
863 dd->last_rounded_m = (v & dd->mult_mask) >>
864 __ffs(dd->mult_mask);
865 dd->last_rounded_n = ((v & dd->div1_mask) >>
866 __ffs(dd->div1_mask)) + 1;
882 const struct dpll_data *dd;
885 dd = clk->dpll_data;
887 ctrl = ti_clk_ll_ops->clk_readl(&dd->control_reg);
888 mult_div1 = ti_clk_ll_ops->clk_readl(&dd->mult_div1_reg);
890 if (clk->context == ((ctrl & dd->enable_mask) >>
891 __ffs(dd->enable_mask)) &&
892 dd->last_rounded_m == ((mult_div1 & dd->mult_mask) >>
893 __ffs(dd->mult_mask)) &&
894 dd->last_rounded_n == ((mult_div1 & dd->div1_mask) >>
895 __ffs(dd->div1_mask)) + 1) {
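Lines 792-895 are the context save/restore pairs (OMAP3 and OMAP4 variants). Save captures the enable field plus M and N, re-applying the + 1 to decode the N - 1 register encoding; the OMAP4 restore (lines 890-895) first compares the live registers against the saved context so it can skip a full relock when nothing changed. A sketch of that comparison, reusing the illustrative masks from the M/N sketch above:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative masks, matching the earlier M/N sketch. */
#define EN_MASK    0x7u
#define MULT_MASK  (0x7ffu << 8)
#define DIV1_MASK  0x7fu

struct dpll_context {
        uint32_t en;     /* saved enable field */
        uint32_t m, n;   /* saved M and N (N already +1-decoded) */
};

static uint32_t field(uint32_t reg, uint32_t mask)
{
        return (reg & mask) >> __builtin_ctz(mask);
}

/* cf. lines 890-895: restore is a no-op if hardware already matches. */
static bool context_matches(const struct dpll_context *ctx,
                            uint32_t ctrl, uint32_t mult_div1)
{
        return ctx->en == field(ctrl, EN_MASK) &&
               ctx->m == field(mult_div1, MULT_MASK) &&
               ctx->n == field(mult_div1, DIV1_MASK) + 1;
}

int main(void)
{
        struct dpll_context ctx = { .en = 0x7, .m = 500, .n = 12 };
        uint32_t ctrl = 0x7, md1 = (500u << 8) | (12 - 1);

        printf("match: %d\n", context_matches(&ctx, ctrl, md1));   /* 1 */
        return 0;
}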
987 struct dpll_data *dd;
1001 dd = clk->dpll_data;
1002 dd->last_rounded_m = d->m;
1003 dd->last_rounded_n = d->n;
1004 dd->last_rounded_rate = div_u64((u64)parent_rate * d->m, d->n);
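Lines 987-1004 take a pre-computed (m, n) pair from a rate table and derive the output rate; div_u64 does the division in 64 bits because parent_rate * m can overflow 32 bits. For example, a 26 MHz reference with m = 400, n = 13 gives 26,000,000 * 400 / 13 = 800 MHz. A standalone equivalent of line 1004:

#include <stdint.h>
#include <stdio.h>

/* cf. line 1004: widen before multiplying so parent_rate * m cannot
 * overflow 32 bits (26 MHz * 400 already exceeds UINT32_MAX). */
static uint64_t dpll_rate(uint32_t parent_rate, uint32_t m, uint32_t n)
{
        return (uint64_t)parent_rate * m / n;
}

int main(void)
{
        printf("%llu\n", (unsigned long long)dpll_rate(26000000, 400, 13));
        /* -> 800000000 */
        return 0;
}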