Lines matching refs:dd (cross-reference hits for the local struct dpll_data pointer dd)

49 const struct dpll_data *dd;
52 dd = clk->dpll_data;
54 v = ti_clk_ll_ops->clk_readl(&dd->control_reg);
55 v &= ~dd->enable_mask;
56 v |= clken_bits << __ffs(dd->enable_mask);
57 ti_clk_ll_ops->clk_writel(v, &dd->control_reg);
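
Every register write in this listing follows the same read-modify-write idiom: clear the field named by a *_mask, then shift the new value up by __ffs(mask), the bit position of the field's lowest set bit (lines 55-56 are the canonical instance). A minimal user-space sketch of the idiom, in which lsb_pos(), the mask value, and the register image are illustrative stand-ins rather than the driver's definitions:

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the kernel's __ffs(): position of the lowest set bit. */
static unsigned int lsb_pos(uint32_t mask)
{
    return (unsigned int)__builtin_ctz(mask);
}

/* Clear the field selected by mask, then place val at the field's LSB. */
static uint32_t field_set(uint32_t reg, uint32_t mask, uint32_t val)
{
    reg &= ~mask;
    reg |= (val << lsb_pos(mask)) & mask;
    return reg;
}

int main(void)
{
    uint32_t control = 0xabcd1234;  /* pretend control_reg contents */
    uint32_t enable_mask = 0x7;     /* hypothetical 3-bit enable field */

    control = field_set(control, enable_mask, 0x7);
    printf("control = 0x%08x\n", control);  /* 0xabcd1237 */
    return 0;
}
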
63 const struct dpll_data *dd;
68 dd = clk->dpll_data;
71 state <<= __ffs(dd->idlest_mask);
73 while (((ti_clk_ll_ops->clk_readl(&dd->idlest_reg) & dd->idlest_mask)
140 const struct dpll_data *dd;
147 dd = clk->dpll_data;
148 state <<= __ffs(dd->idlest_mask);
151 if ((ti_clk_ll_ops->clk_readl(&dd->idlest_reg) & dd->idlest_mask) ==
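
Lines 71-73 and 148-151 shift the wanted state up to the field's position and then either poll or spot-check the idlest register against it. A self-contained sketch of the bounded poll; the retry budget mirrors the driver's MAX_DPLL_WAIT_TRIES idea and the fake status register is invented for the demo:

#include <stdint.h>
#include <stdio.h>

#define MAX_DPLL_WAIT_TRIES 1000000 /* assumed bound */

/* Fake hardware: the "locked" bit comes up after a few reads. */
static uint32_t read_idlest(void)
{
    static int reads;
    return (++reads > 3) ? 0x1 : 0x0;
}

/* Poll until (idlest & mask) == state or the retry budget runs out. */
static int wait_for_state(uint32_t mask, uint32_t state)
{
    int i = 0;

    while ((read_idlest() & mask) != state && i < MAX_DPLL_WAIT_TRIES)
        i++;

    return (i == MAX_DPLL_WAIT_TRIES) ? -1 : 0;
}

int main(void)
{
    uint32_t idlest_mask = 0x1;
    /* state <<= __ffs(mask), as on lines 71 and 148 */
    uint32_t state = 0x1 << __builtin_ctz(idlest_mask);

    printf("lock wait: %s\n",
           wait_for_state(idlest_mask, state) ? "timed out" : "ok");
    return 0;
}
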
303 struct dpll_data *dd = clk->dpll_data;
308 ctrl = ti_clk_ll_ops->clk_readl(&dd->control_reg);
310 if (dd->ssc_modfreq && dd->ssc_deltam) {
311 ctrl |= dd->ssc_enable_mask;
313 if (dd->ssc_downspread)
314 ctrl |= dd->ssc_downspread_mask;
316 ctrl &= ~dd->ssc_downspread_mask;
318 ref_rate = clk_hw_get_rate(dd->clk_ref);
320 (ref_rate / dd->last_rounded_n) / (4 * dd->ssc_modfreq);
321 if (dd->ssc_modfreq > (ref_rate / 70))
334 v = ti_clk_ll_ops->clk_readl(&dd->ssc_modfreq_reg);
335 v &= ~(dd->ssc_modfreq_mant_mask | dd->ssc_modfreq_exp_mask);
336 v |= mantissa << __ffs(dd->ssc_modfreq_mant_mask);
337 v |= exponent << __ffs(dd->ssc_modfreq_exp_mask);
338 ti_clk_ll_ops->clk_writel(v, &dd->ssc_modfreq_reg);
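
The mantissa/exponent write on lines 336-337 implies that the modulation-frequency divider computed on line 320 was first split into a small mantissa times a power-of-two exponent; the splitting loop itself is not among the matched lines. A sketch of one plausible split, assuming a 7-bit mantissa and a 3-bit exponent field:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* mod_freq_divider = (ref_rate / n) / (4 * modfreq), per line 320 */
    uint32_t ref_rate = 19200000, n = 1, modfreq = 30000;
    uint32_t divider = (ref_rate / n) / (4 * modfreq);  /* = 160 */
    uint32_t mantissa = divider, exponent = 0;

    /* Halve until the mantissa fits 7 bits (assumed field widths). */
    while (mantissa > 127 && exponent < 7) {
        mantissa /= 2;
        exponent++;
    }

    /* divider ~= mantissa * 2^exponent */
    printf("divider=%u -> mantissa=%u exponent=%u\n",
           divider, mantissa, exponent);
    return 0;
}
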
340 deltam_step = dd->last_rounded_m * dd->ssc_deltam;
342 if (dd->ssc_downspread)
345 deltam_step <<= __ffs(dd->ssc_deltam_int_mask);
351 deltam_ceil = (deltam_step & dd->ssc_deltam_int_mask) >>
352 __ffs(dd->ssc_deltam_int_mask);
353 if (deltam_step & dd->ssc_deltam_frac_mask)
356 if ((dd->ssc_downspread &&
357 ((dd->last_rounded_m - (2 * deltam_ceil)) < 20 ||
358 dd->last_rounded_m > 2045)) ||
359 ((dd->last_rounded_m - deltam_ceil) < 20 ||
360 (dd->last_rounded_m + deltam_ceil) > 2045))
364 v = ti_clk_ll_ops->clk_readl(&dd->ssc_deltam_reg);
365 v &= ~(dd->ssc_deltam_int_mask | dd->ssc_deltam_frac_mask);
366 v |= deltam_step << __ffs(dd->ssc_deltam_int_mask |
367 dd->ssc_deltam_frac_mask);
368 ti_clk_ll_ops->clk_writel(v, &dd->ssc_deltam_reg);
370 ctrl &= ~dd->ssc_enable_mask;
373 ti_clk_ll_ops->clk_writel(ctrl, &dd->control_reg);
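
Lines 351-360 take the integer part of the fixed-point delta-M step, round it up whenever any fraction bits are set, and warn when M plus or minus that spread would leave the lockable multiplier range (the 20 and 2045 bounds on lines 357-360). A sketch of the ceiling-and-guard arithmetic; the field masks are hypothetical, and the scaling steps elided between lines 340 and 345 of the listing are skipped:

#include <stdint.h>
#include <stdio.h>

#define DELTAM_INT_MASK  0x3ff00    /* hypothetical integer field, bits 17:8 */
#define DELTAM_FRAC_MASK 0x000ff    /* hypothetical fraction field, bits 7:0 */

static unsigned int lsb_pos(uint32_t mask)
{
    return (unsigned int)__builtin_ctz(mask);
}

int main(void)
{
    uint32_t m = 1000;              /* last_rounded_m */
    uint32_t deltam_step = 0x00305; /* fixed-point step: int 3, frac 0x05 */
    uint32_t deltam_ceil;

    /* Integer part, rounded up because fraction bits are set. */
    deltam_ceil = (deltam_step & DELTAM_INT_MASK) >> lsb_pos(DELTAM_INT_MASK);
    if (deltam_step & DELTAM_FRAC_MASK)
        deltam_ceil++;

    /* Guard from lines 356-360: the spread must keep M in [20, 2045]. */
    if ((m - deltam_ceil) < 20 || (m + deltam_ceil) > 2045)
        printf("M=%u with spread %u leaves the lockable range\n",
               m, deltam_ceil);
    else
        printf("M=%u with spread %u is fine\n", m, deltam_ceil);
    return 0;
}
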
386 struct dpll_data *dd = clk->dpll_data;
399 v = ti_clk_ll_ops->clk_readl(&dd->control_reg);
400 v &= ~dd->freqsel_mask;
401 v |= freqsel << __ffs(dd->freqsel_mask);
402 ti_clk_ll_ops->clk_writel(v, &dd->control_reg);
406 v = ti_clk_ll_ops->clk_readl(&dd->mult_div1_reg);
409 if (dd->dcc_mask) {
410 if (dd->last_rounded_rate >= dd->dcc_rate)
411 v |= dd->dcc_mask; /* Enable DCC */
413 v &= ~dd->dcc_mask; /* Disable DCC */
416 v &= ~(dd->mult_mask | dd->div1_mask);
417 v |= dd->last_rounded_m << __ffs(dd->mult_mask);
418 v |= (dd->last_rounded_n - 1) << __ffs(dd->div1_mask);
421 if (dd->dco_mask) {
422 _lookup_dco(clk, &dco, dd->last_rounded_m, dd->last_rounded_n);
423 v &= ~(dd->dco_mask);
424 v |= dco << __ffs(dd->dco_mask);
426 if (dd->sddiv_mask) {
427 _lookup_sddiv(clk, &sd_div, dd->last_rounded_m,
428 dd->last_rounded_n);
429 v &= ~(dd->sddiv_mask);
430 v |= sd_div << __ffs(dd->sddiv_mask);
452 ti_clk_ll_ops->clk_writel(v, &dd->mult_div1_reg);
455 if (dd->m4xen_mask || dd->lpmode_mask) {
456 v = ti_clk_ll_ops->clk_readl(&dd->control_reg);
458 if (dd->m4xen_mask) {
459 if (dd->last_rounded_m4xen)
460 v |= dd->m4xen_mask;
462 v &= ~dd->m4xen_mask;
465 if (dd->lpmode_mask) {
466 if (dd->last_rounded_lpmode)
467 v |= dd->lpmode_mask;
469 v &= ~dd->lpmode_mask;
472 ti_clk_ll_ops->clk_writel(v, &dd->control_reg);
475 if (dd->ssc_enable_mask)
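
Lines 416-418 program the multiplier verbatim but store the divider as N-1, and the context paths below decode it back with a matching +1 (lines 889-892 and 950-953). A round-trip sketch of that encoding, with hypothetical field masks:

#include <stdint.h>
#include <stdio.h>

#define MULT_MASK 0x7ff00   /* hypothetical M field, bits 18:8 */
#define DIV1_MASK 0x0007f   /* hypothetical N field, bits 6:0 */

static unsigned int lsb_pos(uint32_t mask)
{
    return (unsigned int)__builtin_ctz(mask);
}

int main(void)
{
    uint32_t m = 500, n = 12, v = 0;

    /* Encode, as on lines 416-418: M verbatim, N stored as N-1. */
    v &= ~(MULT_MASK | DIV1_MASK);
    v |= m << lsb_pos(MULT_MASK);
    v |= (n - 1) << lsb_pos(DIV1_MASK);

    /* Decode, as in the context-save path (lines 889-892). */
    uint32_t m_back = (v & MULT_MASK) >> lsb_pos(MULT_MASK);
    uint32_t n_back = ((v & DIV1_MASK) >> lsb_pos(DIV1_MASK)) + 1;

    printf("v=0x%08x -> m=%u n=%u\n", v, m_back, n_back);
    return 0;
}
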
526 struct dpll_data *dd;
529 dd = clk->dpll_data;
530 if (!dd)
546 if (clk_hw_get_rate(hw) == clk_hw_get_rate(dd->clk_bypass)) {
547 WARN_ON(parent != dd->clk_bypass);
550 WARN_ON(parent != dd->clk_ref);
589 struct dpll_data *dd;
594 dd = clk->dpll_data;
595 if (!dd)
598 if (clk_hw_get_rate(dd->clk_bypass) == req->rate &&
599 (dd->modes & (1 << DPLL_LOW_POWER_BYPASS))) {
600 req->best_parent_hw = dd->clk_bypass;
604 req->best_parent_hw = dd->clk_ref;
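
Lines 598-604 re-parent the DPLL to its bypass clock only when the requested rate already equals the bypass rate and the modes bitmask advertises low-power bypass; otherwise the reference clock stays the parent and an M/N pair gets rounded. A schematic rendering of that decision; the 0x5 value for DPLL_LOW_POWER_BYPASS is an assumption here:

#include <stdint.h>
#include <stdio.h>

#define DPLL_LOW_POWER_BYPASS 0x5   /* EN_DPLL bypass code; value assumed */

struct fake_dpll {
    unsigned long bypass_rate;  /* clk_hw_get_rate(dd->clk_bypass) */
    unsigned long ref_rate;     /* clk_hw_get_rate(dd->clk_ref) */
    uint32_t modes;             /* bitmask of supported EN_DPLL codes */
};

/* Return the rate the chosen parent would run at for a requested rate. */
static unsigned long pick_parent_rate(const struct fake_dpll *d,
                                      unsigned long req_rate)
{
    if (d->bypass_rate == req_rate &&
        (d->modes & (1 << DPLL_LOW_POWER_BYPASS)))
        return d->bypass_rate;  /* re-parent to bypass, no relock */
    return d->ref_rate;         /* stay on ref and round M/N */
}

int main(void)
{
    struct fake_dpll d = {
        .bypass_rate = 26000000,
        .ref_rate = 26000000,
        .modes = (1 << DPLL_LOW_POWER_BYPASS),
    };

    printf("parent rate for 26 MHz:  %lu\n", pick_parent_rate(&d, 26000000));
    printf("parent rate for 400 MHz: %lu\n", pick_parent_rate(&d, 400000000));
    return 0;
}
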
651 struct dpll_data *dd;
658 dd = clk->dpll_data;
659 if (!dd)
662 if (clk_hw_get_parent(hw) != dd->clk_ref)
665 if (dd->last_rounded_rate == 0)
670 freqsel = _omap3_dpll_compute_freqsel(clk, dd->last_rounded_n);
730 const struct dpll_data *dd;
736 dd = clk->dpll_data;
738 if (!dd->autoidle_mask)
741 v = ti_clk_ll_ops->clk_readl(&dd->autoidle_reg);
742 v &= dd->autoidle_mask;
743 v >>= __ffs(dd->autoidle_mask);
759 const struct dpll_data *dd;
765 dd = clk->dpll_data;
767 if (!dd->autoidle_mask)
775 v = ti_clk_ll_ops->clk_readl(&dd->autoidle_reg);
776 v &= ~dd->autoidle_mask;
777 v |= DPLL_AUTOIDLE_LOW_POWER_STOP << __ffs(dd->autoidle_mask);
778 ti_clk_ll_ops->clk_writel(v, &dd->autoidle_reg);
789 const struct dpll_data *dd;
795 dd = clk->dpll_data;
797 if (!dd->autoidle_mask)
800 v = ti_clk_ll_ops->clk_readl(&dd->autoidle_reg);
801 v &= ~dd->autoidle_mask;
802 v |= DPLL_AUTOIDLE_DISABLE << __ffs(dd->autoidle_mask);
803 ti_clk_ll_ops->clk_writel(v, &dd->autoidle_reg);
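
Lines 730-803 read, program, and clear the autoidle field with the same mask/__ffs pattern: allow-idle writes DPLL_AUTOIDLE_LOW_POWER_STOP and deny-idle writes DPLL_AUTOIDLE_DISABLE. A sketch in which both register codes and the field mask are assumptions:

#include <stdint.h>
#include <stdio.h>

#define DPLL_AUTOIDLE_DISABLE        0x0    /* assumed code */
#define DPLL_AUTOIDLE_LOW_POWER_STOP 0x1    /* assumed code */

static unsigned int lsb_pos(uint32_t mask)
{
    return (unsigned int)__builtin_ctz(mask);
}

static uint32_t autoidle_set(uint32_t reg, uint32_t mask, uint32_t code)
{
    reg &= ~mask;
    reg |= code << lsb_pos(mask);
    return reg;
}

static uint32_t autoidle_get(uint32_t reg, uint32_t mask)
{
    return (reg & mask) >> lsb_pos(mask);
}

int main(void)
{
    uint32_t autoidle_mask = 0x7;   /* hypothetical 3-bit field */
    uint32_t reg = 0;

    reg = autoidle_set(reg, autoidle_mask, DPLL_AUTOIDLE_LOW_POWER_STOP);
    printf("allow idle: field=%u\n", autoidle_get(reg, autoidle_mask));

    reg = autoidle_set(reg, autoidle_mask, DPLL_AUTOIDLE_DISABLE);
    printf("deny idle:  field=%u\n", autoidle_get(reg, autoidle_mask));
    return 0;
}
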
843 const struct dpll_data *dd;
856 dd = pclk->dpll_data;
858 WARN_ON(!dd->enable_mask);
860 v = ti_clk_ll_ops->clk_readl(&dd->control_reg) & dd->enable_mask;
861 v >>= __ffs(dd->enable_mask);
862 if ((v != OMAP3XXX_EN_DPLL_LOCKED) || (dd->flags & DPLL_J_TYPE))
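
Line 862 is the branch that decides the x2 output's behavior: unless the enable field reads back as locked, or if the DPLL is J-type, CLKOUTX2 follows the parent rate instead of doubling it. A sketch of that rate rule; the 0x7 locked code and the flag value are assumptions:

#include <stdint.h>
#include <stdio.h>

#define OMAP3XXX_EN_DPLL_LOCKED 0x7 /* assumed "locked" EN_DPLL code */
#define DPLL_J_TYPE             0x1 /* assumed flag bit */

static unsigned long clkoutx2_rate(unsigned long parent_rate,
                                   uint32_t en_field, uint32_t flags)
{
    /* Mirrors line 862: only a locked, non-J-type DPLL doubles. */
    if (en_field != OMAP3XXX_EN_DPLL_LOCKED || (flags & DPLL_J_TYPE))
        return parent_rate;
    return parent_rate * 2;
}

int main(void)
{
    printf("%lu\n", clkoutx2_rate(100000000, OMAP3XXX_EN_DPLL_LOCKED, 0));
    printf("%lu\n", clkoutx2_rate(100000000, 0x5, 0));  /* bypassed */
    return 0;
}
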
879 struct dpll_data *dd;
882 dd = clk->dpll_data;
884 v = ti_clk_ll_ops->clk_readl(&dd->control_reg);
885 clk->context = (v & dd->enable_mask) >> __ffs(dd->enable_mask);
888 v = ti_clk_ll_ops->clk_readl(&dd->mult_div1_reg);
889 dd->last_rounded_m = (v & dd->mult_mask) >>
890 __ffs(dd->mult_mask);
891 dd->last_rounded_n = ((v & dd->div1_mask) >>
892 __ffs(dd->div1_mask)) + 1;
908 const struct dpll_data *dd;
911 dd = clk->dpll_data;
917 v = ti_clk_ll_ops->clk_readl(&dd->mult_div1_reg);
918 v &= ~(dd->mult_mask | dd->div1_mask);
919 v |= dd->last_rounded_m << __ffs(dd->mult_mask);
920 v |= (dd->last_rounded_n - 1) << __ffs(dd->div1_mask);
921 ti_clk_ll_ops->clk_writel(v, &dd->mult_div1_reg);
940 struct dpll_data *dd;
943 dd = clk->dpll_data;
945 v = ti_clk_ll_ops->clk_readl(&dd->control_reg);
946 clk->context = (v & dd->enable_mask) >> __ffs(dd->enable_mask);
949 v = ti_clk_ll_ops->clk_readl(&dd->mult_div1_reg);
950 dd->last_rounded_m = (v & dd->mult_mask) >>
951 __ffs(dd->mult_mask);
952 dd->last_rounded_n = ((v & dd->div1_mask) >>
953 __ffs(dd->div1_mask)) + 1;
969 const struct dpll_data *dd;
972 dd = clk->dpll_data;
974 ctrl = ti_clk_ll_ops->clk_readl(&dd->control_reg);
975 mult_div1 = ti_clk_ll_ops->clk_readl(&dd->mult_div1_reg);
977 if (clk->context == ((ctrl & dd->enable_mask) >>
978 __ffs(dd->enable_mask)) &&
979 dd->last_rounded_m == ((mult_div1 & dd->mult_mask) >>
980 __ffs(dd->mult_mask)) &&
981 dd->last_rounded_n == ((mult_div1 & dd->div1_mask) >>
982 __ffs(dd->div1_mask)) + 1) {
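
Lines 879-953 save the enable field plus the decoded M/N as context, and the core-DPLL restore path (lines 977-982) compares that context against the live registers so an unchanged DPLL is left alone rather than rewritten and forced to relock. A sketch of the comparison over a saved-context struct, with all field layouts hypothetical:

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

#define ENABLE_MASK 0x00000007  /* hypothetical layouts */
#define MULT_MASK   0x0007ff00
#define DIV1_MASK   0x0000007f

static unsigned int lsb_pos(uint32_t mask)
{
    return (unsigned int)__builtin_ctz(mask);
}

struct dpll_context {
    uint32_t enable;    /* saved EN_DPLL field */
    uint32_t m;         /* saved multiplier */
    uint32_t n;         /* saved divider (human value, not N-1) */
};

/* True when the live registers already match the saved context. */
static bool context_matches(const struct dpll_context *c,
                            uint32_t ctrl, uint32_t mult_div1)
{
    return c->enable == ((ctrl & ENABLE_MASK) >> lsb_pos(ENABLE_MASK)) &&
           c->m == ((mult_div1 & MULT_MASK) >> lsb_pos(MULT_MASK)) &&
           c->n == ((mult_div1 & DIV1_MASK) >> lsb_pos(DIV1_MASK)) + 1;
}

int main(void)
{
    struct dpll_context c = { .enable = 0x7, .m = 500, .n = 12 };
    uint32_t ctrl = 0x7;
    uint32_t mult_div1 = (500u << 8) | (12 - 1);

    printf("restore needed: %s\n",
           context_matches(&c, ctrl, mult_div1) ? "no" : "yes");
    return 0;
}
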
1074 struct dpll_data *dd;
1088 dd = clk->dpll_data;
1089 dd->last_rounded_m = d->m;
1090 dd->last_rounded_n = d->n;
1091 dd->last_rounded_rate = div_u64((u64)parent_rate * d->m, d->n);
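
Line 1091 widens parent_rate to u64 before the multiply so that parent_rate * m cannot overflow 32 bits ahead of the divide (div_u64() is the kernel's 64-bit divide helper). A worked stand-alone equivalent showing why the widening matters:

#include <stdint.h>
#include <stdio.h>
#include <inttypes.h>

int main(void)
{
    unsigned long parent_rate = 38400000;   /* e.g. a 38.4 MHz ref clock */
    uint32_t m = 125, n = 6;                /* hypothetical M/N pair */

    /* 38400000 * 125 = 4.8e9, which would already overflow 32 bits. */
    uint64_t rate = (uint64_t)parent_rate * m / n;

    printf("rate = %" PRIu64 " Hz\n", rate); /* 800000000 */
    return 0;
}
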