Lines Matching refs:div in drivers/clk/bcm/clk-kona.c (Broadcom Kona CCU clock driver)
58 static inline u64 scaled_div_value(struct bcm_clk_div *div, u32 reg_div)
60 return (u64)reg_div + ((u64)1 << div->u.s.frac_width);
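
The register field stores the divisor biased down by 1.0 in fixed-point form with frac_width fractional bits, so adding 1 << frac_width recovers the scaled divisor (a raw field of 0 means a divisor of exactly 1.0). A minimal standalone sketch of that decode step, assuming a simplified stand-in for struct bcm_clk_div, whose real definition is not among the matches:

        #include <stdint.h>
        #include <stdio.h>

        /* Simplified stand-in for the scaled-divider fields (assumption). */
        struct fake_div {
                uint32_t frac_width;    /* number of fractional bits */
        };

        /* Mirrors scaled_div_value(): raw register field -> scaled divisor. */
        static uint64_t fake_scaled_div_value(const struct fake_div *div,
                                              uint32_t reg_div)
        {
                return (uint64_t)reg_div + ((uint64_t)1 << div->frac_width);
        }

        int main(void)
        {
                struct fake_div d = { .frac_width = 3 };

                /* raw 12 with 3 fractional bits: scaled 20, i.e. divisor 2.5 */
                printf("%llu\n", (unsigned long long)fake_scaled_div_value(&d, 12));
                return 0;
        }
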
68 u64 scaled_div_build(struct bcm_clk_div *div, u32 div_value, u32 billionths)
76 combined <<= div->u.s.frac_width;
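
scaled_div_build() goes the other way for caller-supplied values: an integer divisor part plus billionths of a fraction is combined, shifted into the fixed-point domain (line 76 above), and rounded back down by a billion. The surrounding lines are elided by the matches, so this is a hedged reconstruction; plain C division with manual rounding stands in for the kernel's rounded 64-bit division helper:

        #include <stdint.h>
        #include <stdio.h>

        #define BILLION 1000000000ULL

        /* Reconstructed scaled_div_build() arithmetic (assumption):
         * integer part + billionths -> fixed-point scaled divisor. */
        static uint64_t build_scaled_div(uint32_t frac_width, uint32_t div_value,
                                         uint32_t billionths)
        {
                uint64_t combined = (uint64_t)div_value * BILLION + billionths;

                combined <<= frac_width;                   /* line 76 */
                return (combined + BILLION / 2) / BILLION; /* round to closest */
        }

        int main(void)
        {
                /* divisor 2.5 with 3 fractional bits -> scaled value 20 */
                printf("%llu\n", (unsigned long long)build_scaled_div(3, 2, 500000000));
                return 0;
        }
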
83 scaled_div_min(struct bcm_clk_div *div)
85 if (divider_is_fixed(div))
86 return (u64)div->u.fixed;
88 return scaled_div_value(div, 0);
92 u64 scaled_div_max(struct bcm_clk_div *div)
96 if (divider_is_fixed(div))
97 return (u64)div->u.fixed;
99 reg_div = ((u32)1 << div->u.s.width) - 1;
101 return scaled_div_value(div, reg_div);
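
Under this encoding the minimum scaled divisor is scaled_div_value(div, 0), i.e. a divisor of exactly 1.0, and the maximum corresponds to an all-ones register field (line 99 above); fixed dividers collapse both bounds to u.fixed. Worked numbers, assuming width = 5 and frac_width = 3 purely for illustration:

        min = scaled_div_value(div, 0)  = 0  + (1 << 3) = 8    /* divisor 1.000 */
        max = scaled_div_value(div, 31) = 31 + (1 << 3) = 39   /* divisor 4.875 */
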
109 divider(struct bcm_clk_div *div, u64 scaled_div)
111 BUG_ON(scaled_div < scaled_div_min(div));
112 BUG_ON(scaled_div > scaled_div_max(div));
114 return (u32)(scaled_div - ((u64)1 << div->u.s.frac_width));
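
divider() is the exact inverse of scaled_div_value(), with the two BUG_ON() range checks guarding the subtraction from underflow and the result from overflowing the register field. A round-trip sketch with assert() standing in for BUG_ON():

        #include <assert.h>
        #include <stdint.h>

        /* Round trip: raw field -> scaled divisor -> raw field (frac_width 3). */
        static void check_roundtrip(uint32_t reg_div)
        {
                const uint32_t frac_width = 3;
                uint64_t scaled = (uint64_t)reg_div + ((uint64_t)1 << frac_width);

                assert((uint32_t)(scaled - ((uint64_t)1 << frac_width)) == reg_div);
        }

        int main(void)
        {
                for (uint32_t reg_div = 0; reg_div < 32; reg_div++)
                        check_roundtrip(reg_div);
                return 0;
        }
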
119 scale_rate(struct bcm_clk_div *div, u32 rate)
121 if (divider_is_fixed(div))
124 return (u64)rate << div->u.s.frac_width;
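
scale_rate() lifts a rate in Hz into the same fixed-point domain as the scaled divisor, so the two can be divided with plain integer math (the body of the fixed-divider branch at line 121 is elided by the matches). Worked example with frac_width = 3:

        parent_rate        = 100000000 Hz
        scaled_parent_rate = 100000000 << 3 = 800000000
        scaled_div         = 20                            /* divisor 2.5 */
        rate               = 800000000 / 20 = 40000000 Hz  /* 100 MHz / 2.5 */
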
564 static u64 divider_read_scaled(struct ccu_data *ccu, struct bcm_clk_div *div)
570 if (divider_is_fixed(div))
571 return (u64)div->u.fixed;
574 reg_val = __ccu_read(ccu, div->u.s.offset);
578 reg_div = bitfield_extract(reg_val, div->u.s.shift, div->u.s.width);
581 return scaled_div_value(div, reg_div);
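
divider_read_scaled() is read-and-decode: fetch the register word, extract the divider field, and convert it to scaled form, with fixed dividers short-circuiting to u.fixed. bitfield_extract() itself is not among the matches; the sketch below is an assumption about what such a helper conventionally does:

        #include <stdint.h>
        #include <stdio.h>

        /* Extract a width-bit field starting at bit `shift` (width < 32). */
        static uint32_t bitfield_extract(uint32_t reg_val, uint32_t shift,
                                         uint32_t width)
        {
                uint32_t mask = ((uint32_t)1 << width) - 1;

                return (reg_val >> shift) & mask;
        }

        int main(void)
        {
                /* 5-bit field at bit 4 of 0x1f0 -> 0x1f */
                printf("0x%x\n", (unsigned)bitfield_extract(0x1f0, 4, 5));
                return 0;
        }
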
592 struct bcm_clk_div *div, struct bcm_clk_trig *trig)
599 BUG_ON(divider_is_fixed(div));
606 if (div->u.s.scaled_div == BAD_SCALED_DIV_VALUE) {
607 reg_val = __ccu_read(ccu, div->u.s.offset);
608 reg_div = bitfield_extract(reg_val, div->u.s.shift,
609 div->u.s.width);
610 div->u.s.scaled_div = scaled_div_value(div, reg_div);
616 reg_div = divider(div, div->u.s.scaled_div);
626 reg_val = __ccu_read(ccu, div->u.s.offset);
627 reg_val = bitfield_replace(reg_val, div->u.s.shift, div->u.s.width,
629 __ccu_write(ccu, div->u.s.offset, reg_val);
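
__div_commit() is a read-modify-write: if the cached u.s.scaled_div is still BAD_SCALED_DIV_VALUE it is first reloaded from hardware (lines 606-610), then the new raw field is spliced into the register word and written back (lines 626-629); gate and trigger handling fall outside the matches. A sketch of the splice helper, under the same field-layout assumption as bitfield_extract() above:

        #include <stdint.h>
        #include <stdio.h>

        /* Replace a width-bit field at bit `shift` with `val` (width < 32). */
        static uint32_t bitfield_replace(uint32_t reg_val, uint32_t shift,
                                         uint32_t width, uint32_t val)
        {
                uint32_t mask = (((uint32_t)1 << width) - 1) << shift;

                return (reg_val & ~mask) | ((val << shift) & mask);
        }

        int main(void)
        {
                /* replace the 5-bit field at bit 4 of 0xfff with 3 -> 0xe3f */
                printf("0x%x\n", (unsigned)bitfield_replace(0xfff, 4, 5, 3));
                return 0;
        }
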
648 struct bcm_clk_div *div, struct bcm_clk_trig *trig)
650 if (!divider_exists(div) || divider_is_fixed(div))
652 return !__div_commit(ccu, gate, div, trig);
656 struct bcm_clk_div *div, struct bcm_clk_trig *trig,
663 BUG_ON(divider_is_fixed(div));
665 previous = div->u.s.scaled_div;
669 div->u.s.scaled_div = scaled_div;
674 ret = __div_commit(ccu, gate, div, trig);
680 div->u.s.scaled_div = previous; /* Revert the change */
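
divider_write() follows a set-with-rollback pattern: remember the previously cached scaled divisor, install the new one, attempt the hardware commit, and restore the old value if the commit fails (line 680). Reduced to its essentials, with a hypothetical callback standing in for __div_commit():

        #include <stdint.h>

        /* Set-with-rollback: returns 0 on success, nonzero on failure. */
        static int set_scaled_div(uint64_t *cached, uint64_t scaled_div,
                                  int (*commit)(void *arg), void *arg)
        {
                uint64_t previous = *cached;
                int ret;

                *cached = scaled_div;
                ret = commit(arg);
                if (ret)
                        *cached = previous;     /* revert the change */
                return ret;
        }

        static int fake_commit(void *arg) { (void)arg; return 0; }

        int main(void)
        {
                uint64_t cached = 8;

                return set_scaled_div(&cached, 20, fake_commit, 0);
        }
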
694 struct bcm_clk_div *div, struct bcm_clk_div *pre_div,
701 if (!divider_exists(div))
720 scaled_rate = scale_rate(div, scaled_rate);
725 scaled_parent_rate = scale_rate(div, parent_rate);
733 scaled_div = divider_read_scaled(ccu, div);
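
In clk_recalc_rate(), line 720 is the second scaling step of the pre-divider branch (the parent rate is scaled by both dividers' fractional widths before dividing by the pre-divider's scaled value, which preserves precision across the chained division), while line 725 is the single scaling step taken when no pre-divider exists. Worked numbers for the pre-divider case, assuming frac_width = 3 for both dividers:

        parent_rate = 96000000 Hz, pre_div = 2.0 (scaled 16), div = 1.5 (scaled 12)

        scaled_rate        = (96000000 << 3) << 3 = 6144000000
        scaled_parent_rate = 6144000000 / 16      = 384000000
        rate               = 384000000 / 12       = 32000000 Hz  /* 96 MHz / 3.0 */
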
748 static long round_rate(struct ccu_data *ccu, struct bcm_clk_div *div,
759 BUG_ON(!divider_exists(div));
779 scaled_rate = scale_rate(div, scaled_rate);
784 scaled_parent_rate = scale_rate(div, parent_rate);
792 if (!divider_is_fixed(div)) {
795 min_scaled_div = scaled_div_min(div);
796 max_scaled_div = scaled_div_max(div);
802 best_scaled_div = divider_read_scaled(ccu, div);
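
round_rate() derives the ideal scaled divisor from the ratio of scaled parent rate to requested rate, then clamps it to the representable range (lines 795-796); a fixed divider instead just reports its current value (line 802). The selection step in isolation, with round-to-closest division assumed for the ratio:

        #include <stdint.h>
        #include <stdio.h>

        /* Pick the representable scaled divisor nearest the ideal ratio. */
        static uint64_t best_scaled_div(uint64_t scaled_parent_rate, uint64_t rate,
                                        uint64_t min_div, uint64_t max_div)
        {
                uint64_t best = (scaled_parent_rate + rate / 2) / rate;

                if (best > max_div)
                        best = max_div;
                else if (best < min_div)
                        best = min_div;
                return best;
        }

        int main(void)
        {
                /* 800000000 / 40000000 = 20, within [8, 39] -> 20 */
                printf("%llu\n", (unsigned long long)
                       best_scaled_div(800000000, 40000000, 8, 39));
                return 0;
        }
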
1004 return clk_recalc_rate(bcm_clk->ccu, &data->div, &data->pre_div,
1012 struct bcm_clk_div *div = &bcm_clk->u.peri->div;
1014 if (!divider_exists(div))
1018 return round_rate(bcm_clk->ccu, div, &bcm_clk->u.peri->pre_div,
1135 struct bcm_clk_div *div = &data->div;
1145 if (!divider_exists(div))
1153 if (divider_is_fixed(&data->div))
1161 (void)round_rate(bcm_clk->ccu, div, &data->pre_div,
1168 ret = divider_write(bcm_clk->ccu, &data->gate, &data->div,
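
The set_rate path (lines 1135-1168) ties the pieces together: bail out early when no divider exists or the divider is fixed, ask round_rate() for the nearest achievable scaled divisor, then commit it through divider_write(). An outline with hypothetical stubs for the driver calls; the elided bodies of the two early-out checks are assumed to tolerate only a no-op rate change:

        #include <errno.h>
        #include <stdbool.h>
        #include <stdint.h>

        struct div_stub { bool exists; bool fixed; };    /* hypothetical */

        static long stub_round_rate(uint64_t parent_rate, uint64_t rate,
                                    uint64_t *scaled_div)
        {
                if (!rate)
                        rate = 1;               /* avoid dividing by zero */
                *scaled_div = (parent_rate + rate / 2) / rate;
                if (!*scaled_div)
                        *scaled_div = 1;
                return (long)(parent_rate / *scaled_div);
        }

        static int stub_divider_write(uint64_t scaled_div)
        {
                (void)scaled_div;
                return 0;       /* pretend the hardware commit succeeded */
        }

        static int set_rate_outline(const struct div_stub *div, uint64_t rate,
                                    uint64_t parent_rate)
        {
                uint64_t scaled_div = 0;

                if (!div->exists || div->fixed)
                        return rate == parent_rate ? 0 : -EINVAL;

                (void)stub_round_rate(parent_rate, rate, &scaled_div);
                return stub_divider_write(scaled_div);
        }

        int main(void)
        {
                struct div_stub d = { .exists = true, .fixed = false };

                return set_rate_outline(&d, 40000000, 100000000);
        }
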
1216 if (!div_init(ccu, &peri->gate, &peri->div, &peri->trig)) {