Lines matching refs:div (each line below references the identifier div and is prefixed with its line number in the source file)
50 static inline u64 scaled_div_value(struct bcm_clk_div *div, u32 reg_div)
52 return (u64)reg_div + ((u64)1 << div->u.s.frac_width);
60 u64 scaled_div_build(struct bcm_clk_div *div, u32 div_value, u32 billionths)
68 combined <<= div->u.s.frac_width;
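
The lines from scaled_div_value() and scaled_div_build() establish the driver's fixed-point divisor representation: the hardware field stores (divisor - 1) with frac_width fractional bits, so a register value of 0 means a divisor of exactly 1.0, and scaled_div_build() converts an integer-plus-billionths divisor into that same scale. A minimal standalone sketch of the arithmetic, assuming frac_width = 3 and round-to-closest (the listing does not show the driver's exact rounding):

#include <assert.h>
#include <stdint.h>

#define FRAC_WIDTH 3            /* hypothetical fractional bit count */
#define BILLION    1000000000ULL

/* Register value -> scaled divisor: reg 0 represents divisor 1.0. */
static uint64_t scaled_div_value(uint32_t reg_div)
{
        return (uint64_t)reg_div + ((uint64_t)1 << FRAC_WIDTH);
}

/* Integer part + billionths -> scaled divisor, rounded to closest. */
static uint64_t scaled_div_build(uint32_t div_value, uint32_t billionths)
{
        uint64_t combined = (uint64_t)div_value * BILLION + billionths;

        combined <<= FRAC_WIDTH;
        return (combined + BILLION / 2) / BILLION;
}

int main(void)
{
        /* Divisor 2.5 in Q3 fixed point is 2.5 * 8 = 20 ... */
        assert(scaled_div_build(2, 500000000) == 20);
        /* ... and the register would hold 20 - 8 = 12. */
        assert(scaled_div_value(12) == 20);
        return 0;
}
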
75 scaled_div_min(struct bcm_clk_div *div)
77 if (divider_is_fixed(div))
78 return (u64)div->u.fixed;
80 return scaled_div_value(div, 0);
84 u64 scaled_div_max(struct bcm_clk_div *div)
88 if (divider_is_fixed(div))
89 return (u64)div->u.fixed;
91 reg_div = ((u32)1 << div->u.s.width) - 1;
93 return scaled_div_value(div, reg_div);
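
scaled_div_min() and scaled_div_max() bound that representation: the minimum is a register value of 0 (divisor 1.0), the maximum is the all-ones field of u.s.width bits, and a fixed divider collapses both to u.fixed. Worked out for hypothetical width = 5, frac_width = 3:

#include <assert.h>
#include <stdint.h>

#define WIDTH      5   /* hypothetical register field width */
#define FRAC_WIDTH 3   /* hypothetical fractional bit count */

int main(void)
{
        uint64_t min = (uint64_t)1 << FRAC_WIDTH;       /* reg 0 -> 1.0 */
        uint32_t reg_max = ((uint32_t)1 << WIDTH) - 1;  /* all-ones field */
        uint64_t max = (uint64_t)reg_max + ((uint64_t)1 << FRAC_WIDTH);

        assert(min == 8);    /* divisor 1.0 in Q3 */
        assert(max == 39);   /* divisor 39/8 = 4.875 */
        return 0;
}
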
101 divider(struct bcm_clk_div *div, u64 scaled_div)
103 BUG_ON(scaled_div < scaled_div_min(div));
104 BUG_ON(scaled_div > scaled_div_max(div));
106 return (u32)(scaled_div - ((u64)1 << div->u.s.frac_width));
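
divider() is the exact inverse of scaled_div_value(), recovering the register field from a scaled divisor; the BUG_ON calls insist the value is already within the representable range. A sketch of the round trip, with assert() standing in for BUG_ON and the same assumed field sizes:

#include <assert.h>
#include <stdint.h>

#define WIDTH      5
#define FRAC_WIDTH 3

static uint64_t scaled_div_value(uint32_t reg_div)
{
        return (uint64_t)reg_div + ((uint64_t)1 << FRAC_WIDTH);
}

/* Scaled divisor -> register field; the caller must stay in range. */
static uint32_t divider(uint64_t scaled_div)
{
        assert(scaled_div >= scaled_div_value(0));
        assert(scaled_div <= scaled_div_value(((uint32_t)1 << WIDTH) - 1));

        return (uint32_t)(scaled_div - ((uint64_t)1 << FRAC_WIDTH));
}

int main(void)
{
        /* Every representable register value survives the round trip. */
        for (uint32_t reg = 0; reg < (1u << WIDTH); reg++)
                assert(divider(scaled_div_value(reg)) == reg);
        return 0;
}
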
111 scale_rate(struct bcm_clk_div *div, u32 rate)
113 if (divider_is_fixed(div))
116 return (u64)rate << div->u.s.frac_width;
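
scale_rate() shifts a rate into the divisor's fixed-point scale; the 2^frac_width factors then cancel when a scaled rate is divided by a scaled divisor, so rate = parent_rate / divisor falls out of pure integer math. A sketch of that cancellation (the final round-to-closest division is not in the listing, so it is an assumption here):

#include <assert.h>
#include <stdint.h>

#define FRAC_WIDTH 3

static uint64_t scale_rate(uint32_t rate)
{
        return (uint64_t)rate << FRAC_WIDTH;
}

static uint64_t div_round_closest(uint64_t n, uint64_t d)
{
        return (n + d / 2) / d;
}

int main(void)
{
        uint32_t parent_rate = 100000000;   /* 100 MHz */
        uint64_t scaled_div = 20;           /* divisor 2.5 in Q3 (2.5 * 8) */

        /* (rate << 3) / (2.5 * 8) == rate / 2.5 */
        assert(div_round_closest(scale_rate(parent_rate), scaled_div)
                        == 40000000);
        return 0;
}
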
556 static u64 divider_read_scaled(struct ccu_data *ccu, struct bcm_clk_div *div)
562 if (divider_is_fixed(div))
563 return (u64)div->u.fixed;
566 reg_val = __ccu_read(ccu, div->u.s.offset);
570 reg_div = bitfield_extract(reg_val, div->u.s.shift, div->u.s.width);
573 return scaled_div_value(div, reg_div);
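
divider_read_scaled() reads the CCU register at u.s.offset, pulls the divider field out with bitfield_extract(), and converts the raw field to a scaled divisor. The helper itself is not in the listing, so the following is an assumed but conventional implementation of the extract step, inferred from the shift/width arguments at the call site:

#include <assert.h>
#include <stdint.h>

/* Extract a width-bit field starting at bit position shift. */
static uint32_t bitfield_extract(uint32_t reg_val, uint32_t shift,
                                 uint32_t width)
{
        return (reg_val >> shift) & (((uint32_t)1 << width) - 1);
}

int main(void)
{
        /* A 5-bit divider field at bit 8, holding the value 12. */
        uint32_t reg_val = 0xdead0000 | (12u << 8);

        assert(bitfield_extract(reg_val, 8, 5) == 12);
        return 0;
}
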
584 struct bcm_clk_div *div, struct bcm_clk_trig *trig)
591 BUG_ON(divider_is_fixed(div));
598 if (div->u.s.scaled_div == BAD_SCALED_DIV_VALUE) {
599 reg_val = __ccu_read(ccu, div->u.s.offset);
600 reg_div = bitfield_extract(reg_val, div->u.s.shift,
601 div->u.s.width);
602 div->u.s.scaled_div = scaled_div_value(div, reg_div);
608 reg_div = divider(div, div->u.s.scaled_div);
618 reg_val = __ccu_read(ccu, div->u.s.offset);
619 reg_val = bitfield_replace(reg_val, div->u.s.shift, div->u.s.width,
621 __ccu_write(ccu, div->u.s.offset, reg_val);
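
__div_commit() performs a read-modify-write: it refreshes the cached scaled divisor from hardware when the cache still holds BAD_SCALED_DIV_VALUE, converts the cache back to a register field with divider(), and splices that field into the register via bitfield_replace() before writing it back. As with the extract helper, here is an assumed equivalent of the replace step:

#include <assert.h>
#include <stdint.h>

/* Replace a width-bit field at bit position shift with val. */
static uint32_t bitfield_replace(uint32_t reg_val, uint32_t shift,
                                 uint32_t width, uint32_t val)
{
        uint32_t mask = (((uint32_t)1 << width) - 1) << shift;

        return (reg_val & ~mask) | ((val << shift) & mask);
}

int main(void)
{
        uint32_t reg_val = 0xffffffff;

        /* Write divider value 12 into a 5-bit field at bit 8. */
        reg_val = bitfield_replace(reg_val, 8, 5, 12);
        assert(((reg_val >> 8) & 0x1f) == 12);
        /* Bits outside the field are untouched. */
        assert((reg_val | (0x1fu << 8)) == 0xffffffff);
        return 0;
}
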
640 struct bcm_clk_div *div, struct bcm_clk_trig *trig)
642 if (!divider_exists(div) || divider_is_fixed(div))
644 return !__div_commit(ccu, gate, div, trig);
648 struct bcm_clk_div *div, struct bcm_clk_trig *trig,
655 BUG_ON(divider_is_fixed(div));
657 previous = div->u.s.scaled_div;
661 div->u.s.scaled_div = scaled_div;
666 ret = __div_commit(ccu, gate, div, trig);
672 div->u.s.scaled_div = previous; /* Revert the change */
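
divider_write() uses a cache-then-commit pattern: the new scaled divisor goes into the software cache first, and if the hardware commit fails the cached value is reverted so software state never diverges from hardware. A minimal sketch of the pattern, with a stand-in commit function that always fails:

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

struct divider_cache {
        uint64_t scaled_div;    /* software copy of the hardware field */
};

/* Stand-in for __div_commit(); pretend the hardware write failed. */
static bool commit(struct divider_cache *cache)
{
        (void)cache;
        return false;
}

static int divider_write(struct divider_cache *cache, uint64_t scaled_div)
{
        uint64_t previous = cache->scaled_div;

        cache->scaled_div = scaled_div;
        if (!commit(cache)) {
                cache->scaled_div = previous;   /* revert the change */
                return -1;
        }
        return 0;
}

int main(void)
{
        struct divider_cache cache = { .scaled_div = 20 };

        assert(divider_write(&cache, 24) == -1);
        assert(cache.scaled_div == 20);         /* cache was reverted */
        return 0;
}
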
686 struct bcm_clk_div *div, struct bcm_clk_div *pre_div,
693 if (!divider_exists(div))
712 scaled_rate = scale_rate(div, scaled_rate);
717 scaled_parent_rate = scale_rate(div, parent_rate);
725 scaled_div = divider_read_scaled(ccu, div);
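
clk_recalc_rate() handles a chained pre-divider carefully: as the two scale_rate() calls show, the parent rate is scaled by both the pre-divider's and the divider's fractional widths before the pre-divider division is performed, so precision is lost only at the one rounded division per stage. A sketch of the two-stage computation, assuming Q3 scaling on both dividers:

#include <assert.h>
#include <stdint.h>

#define FRAC_WIDTH 3

static uint64_t div_round_closest(uint64_t n, uint64_t d)
{
        return (n + d / 2) / d;
}

int main(void)
{
        uint64_t parent_rate = 300000000;   /* 300 MHz */
        uint64_t scaled_pre_div = 24;       /* pre-divider 3.0 in Q3 */
        uint64_t scaled_div = 16;           /* divider 2.0 in Q3 */
        uint64_t scaled_rate, scaled_parent_rate, result;

        /* Scale by *both* fractional widths before dividing. */
        scaled_rate = parent_rate << FRAC_WIDTH;        /* pre_div scale */
        scaled_rate = scaled_rate << FRAC_WIDTH;        /* div scale */
        scaled_parent_rate = div_round_closest(scaled_rate, scaled_pre_div);

        result = div_round_closest(scaled_parent_rate, scaled_div);
        assert(result == 50000000);         /* 300 MHz / 3 / 2 */
        return 0;
}
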
740 static long round_rate(struct ccu_data *ccu, struct bcm_clk_div *div,
751 BUG_ON(!divider_exists(div));
771 scaled_rate = scale_rate(div, scaled_rate);
776 scaled_parent_rate = scale_rate(div, parent_rate);
784 if (!divider_is_fixed(div)) {
787 min_scaled_div = scaled_div_min(div);
788 max_scaled_div = scaled_div_max(div);
794 best_scaled_div = divider_read_scaled(ccu, div);
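
round_rate() runs the computation backwards: the ideal scaled divisor is the scaled parent rate divided by the requested rate, clamped to [scaled_div_min, scaled_div_max] for a programmable divider, while a fixed divider just reports what the current hardware value yields (the divider_read_scaled() fallback above). A sketch of that selection under the same assumed Q3 field:

#include <assert.h>
#include <stdint.h>

#define FRAC_WIDTH     3
#define MIN_SCALED_DIV 8        /* divisor 1.0 in Q3 */
#define MAX_SCALED_DIV 39       /* divisor 4.875 in Q3 */

static uint64_t div_round_closest(uint64_t n, uint64_t d)
{
        return (n + d / 2) / d;
}

int main(void)
{
        uint64_t parent_rate = 100000000;               /* 100 MHz */
        uint64_t rate = 40000000;                       /* want 40 MHz */
        uint64_t scaled_parent_rate = parent_rate << FRAC_WIDTH;
        uint64_t best;

        /* Ideal divisor is parent/rate, computed in Q3: 2.5 -> 20. */
        best = div_round_closest(scaled_parent_rate, rate);
        if (best > MAX_SCALED_DIV)
                best = MAX_SCALED_DIV;
        else if (best < MIN_SCALED_DIV)
                best = MIN_SCALED_DIV;
        assert(best == 20);

        /* The rate actually achievable with that divisor. */
        assert(div_round_closest(scaled_parent_rate, best) == 40000000);
        return 0;
}
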
996 return clk_recalc_rate(bcm_clk->ccu, &data->div, &data->pre_div,
1004 struct bcm_clk_div *div = &bcm_clk->u.peri->div;
1006 if (!divider_exists(div))
1010 return round_rate(bcm_clk->ccu, div, &bcm_clk->u.peri->pre_div,
1127 struct bcm_clk_div *div = &data->div;
1137 if (!divider_exists(div))
1145 if (divider_is_fixed(&data->div))
1153 (void)round_rate(bcm_clk->ccu, div, &data->pre_div,
1160 ret = divider_write(bcm_clk->ccu, &data->gate, &data->div,
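
The kona_peri_clk_set_rate() lines sketch the overall flow: round_rate() is called for its side effect of computing the best scaled divisor, and divider_write() then commits that value to hardware. A compact stand-in for that round-then-write shape (all names and signatures below are illustrative, not the driver's):

#include <assert.h>
#include <stdint.h>

#define FRAC_WIDTH 3

static uint64_t div_round_closest(uint64_t n, uint64_t d)
{
        return (n + d / 2) / d;
}

/* Round a requested rate; report the divisor chosen via *scaled_div. */
static uint64_t round_rate(uint64_t parent_rate, uint64_t rate,
                           uint64_t *scaled_div)
{
        uint64_t scaled_parent_rate = parent_rate << FRAC_WIDTH;

        *scaled_div = div_round_closest(scaled_parent_rate, rate);
        return div_round_closest(scaled_parent_rate, *scaled_div);
}

/* Stand-in for divider_write(): just record the committed value. */
static uint64_t committed;
static int divider_write(uint64_t scaled_div)
{
        committed = scaled_div;
        return 0;
}

int main(void)
{
        uint64_t scaled_div;

        /* set_rate shape: round for the divisor, then commit it. */
        (void)round_rate(100000000, 40000000, &scaled_div);
        assert(divider_write(scaled_div) == 0);
        assert(committed == 20);                /* divisor 2.5 in Q3 */
        return 0;
}
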
1208 if (!div_init(ccu, &peri->gate, &peri->div, &peri->trig)) {