Lines Matching defs:clk_wzrd
85 * struct clk_wzrd - Clock wizard private data structure
97 struct clk_wzrd {
110 * struct clk_wzrd_divider - clock divider specific to clk_wzrd
117 * @flags: clk_wzrd divider flags
138 #define to_clk_wzrd(_nb) container_of(_nb, struct clk_wzrd, nb)
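The to_clk_wzrd() helper above recovers the driver's private structure from the notifier_block embedded inside it, the standard container_of() idiom. A minimal sketch of the same pattern, with a hypothetical my_priv structure standing in for struct clk_wzrd:

#include <linux/kernel.h>
#include <linux/notifier.h>

/* Private data with an embedded notifier_block, as in struct clk_wzrd. */
struct my_priv {
        struct notifier_block nb;       /* the embedded member */
        bool suspended;
};

#define to_my_priv(_nb) container_of(_nb, struct my_priv, nb)

static int my_notifier_call(struct notifier_block *nb, unsigned long event,
                            void *data)
{
        /* The core hands us &priv->nb; recover the enclosing structure. */
        struct my_priv *priv = to_my_priv(nb);

        return priv->suspended ? NOTIFY_DONE : NOTIFY_OK;
}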
147 /* spin lock variable for clk_wzrd */
538 struct clk_wzrd *clk_wzrd = to_clk_wzrd(nb);
540 if (clk_wzrd->suspended)
543 if (ndata->clk == clk_wzrd->clk_in1)
544 max = clk_wzrd_max_freq[clk_wzrd->speed_grade - 1];
545 else if (ndata->clk == clk_wzrd->axi_clk)
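The fragments above are the core of clk_wzrd_clk_notifier(): nothing is checked while the device is suspended, and the proposed parent rate is compared against a per-speed-grade maximum for clk_in1 (with a separate limit for s_axi_aclk). A hedged sketch of such a PRE_RATE_CHANGE handler; only the field and table names come from the listing, the limit values and the AXI ceiling here are illustrative:

#include <linux/clk.h>
#include <linux/notifier.h>

/* Illustrative speed-grade table; the driver's clk_wzrd_max_freq[] may differ. */
static const unsigned long example_max_freq[] = {
        600000000UL, 600000000UL, 800000000UL,
};

static int example_rate_notifier(struct notifier_block *nb,
                                 unsigned long event, void *data)
{
        struct clk_notifier_data *ndata = data;
        struct clk_wzrd *clk_wzrd = to_clk_wzrd(nb);
        unsigned long max;

        /* Rates are only policed while the device is active. */
        if (clk_wzrd->suspended)
                return NOTIFY_DONE;

        if (ndata->clk == clk_wzrd->clk_in1)
                max = example_max_freq[clk_wzrd->speed_grade - 1];
        else if (ndata->clk == clk_wzrd->axi_clk)
                max = 150000000UL;              /* illustrative AXI ceiling */
        else
                return NOTIFY_DONE;             /* not a clock we watch */

        if (event == PRE_RATE_CHANGE && ndata->new_rate > max)
                return NOTIFY_BAD;              /* veto an unsupported rate */

        return NOTIFY_OK;
}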
564 struct clk_wzrd *clk_wzrd = dev_get_drvdata(dev);
566 clk_disable_unprepare(clk_wzrd->axi_clk);
567 clk_wzrd->suspended = true;
575 struct clk_wzrd *clk_wzrd = dev_get_drvdata(dev);
577 ret = clk_prepare_enable(clk_wzrd->axi_clk);
583 clk_wzrd->suspended = false;
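The suspend/resume pair simply gates the AXI interface clock and records the state so the notifier can skip its checks while suspended. A sketch of how such callbacks are typically wired into dev_pm_ops; SIMPLE_DEV_PM_OPS is assumed here, newer kernels may prefer DEFINE_SIMPLE_DEV_PM_OPS:

#include <linux/clk.h>
#include <linux/device.h>
#include <linux/pm.h>

static int __maybe_unused example_suspend(struct device *dev)
{
        struct clk_wzrd *clk_wzrd = dev_get_drvdata(dev);

        /* Stop the register-interface clock and remember the state. */
        clk_disable_unprepare(clk_wzrd->axi_clk);
        clk_wzrd->suspended = true;

        return 0;
}

static int __maybe_unused example_resume(struct device *dev)
{
        struct clk_wzrd *clk_wzrd = dev_get_drvdata(dev);
        int ret;

        ret = clk_prepare_enable(clk_wzrd->axi_clk);
        if (ret) {
                dev_err(dev, "unable to enable s_axi_aclk\n");
                return ret;
        }
        clk_wzrd->suspended = false;

        return 0;
}

static SIMPLE_DEV_PM_OPS(example_pm_ops, example_suspend, example_resume);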
598 struct clk_wzrd *clk_wzrd;
604 clk_wzrd = devm_kzalloc(&pdev->dev, sizeof(*clk_wzrd), GFP_KERNEL);
605 if (!clk_wzrd)
607 platform_set_drvdata(pdev, clk_wzrd);
609 clk_wzrd->base = devm_platform_ioremap_resource(pdev, 0);
610 if (IS_ERR(clk_wzrd->base))
611 return PTR_ERR(clk_wzrd->base);
613 ret = of_property_read_u32(np, "xlnx,speed-grade", &clk_wzrd->speed_grade);
615 if (clk_wzrd->speed_grade < 1 || clk_wzrd->speed_grade > 3) {
617 clk_wzrd->speed_grade);
618 clk_wzrd->speed_grade = 0;
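Probe starts by allocating the private structure with devm_kzalloc(), storing it as drvdata, mapping the register space, and reading the optional xlnx,speed-grade property, falling back to 0 (no rate policing) when the value is out of range. A condensed sketch of those steps; the warning text is illustrative:

#include <linux/err.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

static int example_probe_setup(struct platform_device *pdev)
{
        struct device_node *np = pdev->dev.of_node;
        struct clk_wzrd *clk_wzrd;
        int ret;

        clk_wzrd = devm_kzalloc(&pdev->dev, sizeof(*clk_wzrd), GFP_KERNEL);
        if (!clk_wzrd)
                return -ENOMEM;
        platform_set_drvdata(pdev, clk_wzrd);

        /* Map the first (and only) register region. */
        clk_wzrd->base = devm_platform_ioremap_resource(pdev, 0);
        if (IS_ERR(clk_wzrd->base))
                return PTR_ERR(clk_wzrd->base);

        /* Optional property: 0 means "do not police parent rates". */
        ret = of_property_read_u32(np, "xlnx,speed-grade",
                                   &clk_wzrd->speed_grade);
        if (!ret && (clk_wzrd->speed_grade < 1 || clk_wzrd->speed_grade > 3)) {
                dev_warn(&pdev->dev, "invalid speed grade %u\n",
                         clk_wzrd->speed_grade);
                clk_wzrd->speed_grade = 0;
        }

        return 0;
}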
622 clk_wzrd->clk_in1 = devm_clk_get(&pdev->dev, "clk_in1");
623 if (IS_ERR(clk_wzrd->clk_in1))
624 return dev_err_probe(&pdev->dev, PTR_ERR(clk_wzrd->clk_in1),
627 clk_wzrd->axi_clk = devm_clk_get(&pdev->dev, "s_axi_aclk");
628 if (IS_ERR(clk_wzrd->axi_clk))
629 return dev_err_probe(&pdev->dev, PTR_ERR(clk_wzrd->axi_clk),
631 ret = clk_prepare_enable(clk_wzrd->axi_clk);
636 rate = clk_get_rate(clk_wzrd->axi_clk);
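Next, the two input clocks are requested: clk_in1 (the clock being multiplied and divided) and s_axi_aclk (the register-interface clock), with dev_err_probe() reporting failures uniformly, and the AXI clock is enabled before any register access. A sketch of that step; on kernels that provide it, devm_clk_get_enabled() could fold the last two calls together:

#include <linux/clk.h>
#include <linux/err.h>
#include <linux/platform_device.h>

static int example_get_clocks(struct platform_device *pdev,
                              struct clk_wzrd *clk_wzrd)
{
        unsigned long rate;
        int ret;

        clk_wzrd->clk_in1 = devm_clk_get(&pdev->dev, "clk_in1");
        if (IS_ERR(clk_wzrd->clk_in1))
                return dev_err_probe(&pdev->dev, PTR_ERR(clk_wzrd->clk_in1),
                                     "clk_in1 not found\n");

        clk_wzrd->axi_clk = devm_clk_get(&pdev->dev, "s_axi_aclk");
        if (IS_ERR(clk_wzrd->axi_clk))
                return dev_err_probe(&pdev->dev, PTR_ERR(clk_wzrd->axi_clk),
                                     "s_axi_aclk not found\n");

        /* The register file is only reachable while the AXI clock runs. */
        ret = clk_prepare_enable(clk_wzrd->axi_clk);
        if (ret) {
                dev_err(&pdev->dev, "enabling s_axi_aclk failed\n");
                return ret;
        }

        rate = clk_get_rate(clk_wzrd->axi_clk);
        dev_dbg(&pdev->dev, "s_axi_aclk rate: %lu Hz\n", rate);
        /* the driver goes on to validate this rate against the AXI limit */

        return 0;
}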
657 clk_wzrd->clkout[0] = clk_wzrd_register_divider
659 __clk_get_name(clk_wzrd->clk_in1), 0,
660 clk_wzrd->base, WZRD_CLK_CFG_REG(3),
669 reg = readl(clk_wzrd->base + WZRD_CLK_CFG_REG(0));
681 clk_wzrd->clks_internal[wzrd_clk_mul] = clk_register_fixed_factor
683 __clk_get_name(clk_wzrd->clk_in1),
685 if (IS_ERR(clk_wzrd->clks_internal[wzrd_clk_mul])) {
687 ret = PTR_ERR(clk_wzrd->clks_internal[wzrd_clk_mul]);
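The VCO multiplier stage is modelled as a fixed-factor clock parented on clk_in1; the multiply/divide values come from WZRD_CLK_CFG_REG(0), read just above. A sketch of the registration call with made-up factors; the real ones are decoded from the register, and the clock name here is a placeholder:

#include <linux/clk-provider.h>

/* Register the internal "mul" clock: rate = parent_rate * mult / div. */
static struct clk *example_register_mul(struct clk_wzrd *clk_wzrd)
{
        unsigned int mult = 10, div = 1;        /* illustrative values */

        return clk_register_fixed_factor(NULL, "example_mul",
                                         __clk_get_name(clk_wzrd->clk_in1),
                                         0, mult, div);
}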
697 ctrl_reg = clk_wzrd->base + WZRD_CLK_CFG_REG(0);
699 clk_wzrd->clks_internal[wzrd_clk_mul_div] = clk_register_divider
701 __clk_get_name(clk_wzrd->clks_internal[wzrd_clk_mul]),
704 if (IS_ERR(clk_wzrd->clks_internal[wzrd_clk_mul_div])) {
706 ret = PTR_ERR(clk_wzrd->clks_internal[wzrd_clk_mul_div]);
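The common divider between the VCO and the outputs is a stock clk_register_divider() on the same configuration register, protected by the driver's spinlock. A sketch with assumed shift, width and divider flags, since the listing does not show them; the field layout is defined by the Clocking Wizard IP:

#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/spinlock.h>

static DEFINE_SPINLOCK(example_lock);   /* stands in for the driver's lock */

static struct clk *example_register_mul_div(void __iomem *ctrl_reg,
                                            const char *parent_name)
{
        /* Shift 0, width 8, ONE_BASED/ALLOW_ZERO are assumptions. */
        return clk_register_divider(NULL, "example_mul_div", parent_name, 0,
                                    ctrl_reg, 0, 8,
                                    CLK_DIVIDER_ONE_BASED |
                                    CLK_DIVIDER_ALLOW_ZERO,
                                    &example_lock);
}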
720 clk_wzrd->clkout[i] = clk_wzrd_register_divf
723 clk_wzrd->base, (WZRD_CLK_CFG_REG(2) + i * 12),
729 clk_wzrd->clkout[i] = clk_wzrd_register_divider
732 clk_wzrd->base, (WZRD_CLK_CFG_REG(2) + i * 12),
737 if (IS_ERR(clk_wzrd->clkout[i])) {
741 clk_unregister(clk_wzrd->clkout[j]);
744 ret = PTR_ERR(clk_wzrd->clkout[i]);
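Each output is registered in a loop, using the fractional-divider variant (clk_wzrd_register_divf) where supported and the plain divider otherwise; on failure, every output registered so far is unregistered before bailing out. A generic sketch of that register-or-unwind pattern, where register_one stands in for the driver's own clk_wzrd_register_divf()/clk_wzrd_register_divider() helpers:

#include <linux/clk-provider.h>
#include <linux/err.h>

static int example_register_outputs(struct clk_wzrd *clk_wzrd, int nr_outputs,
                                    struct clk *(*register_one)(struct clk_wzrd *, int))
{
        int i, j;

        for (i = 0; i < nr_outputs; i++) {
                clk_wzrd->clkout[i] = register_one(clk_wzrd, i);
                if (IS_ERR(clk_wzrd->clkout[i])) {
                        /* Unwind every output registered before this one. */
                        for (j = i - 1; j >= 0; j--)
                                clk_unregister(clk_wzrd->clkout[j]);
                        return PTR_ERR(clk_wzrd->clkout[i]);
                }
        }

        return 0;
}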
750 clk_wzrd->clk_data.clks = clk_wzrd->clkout;
751 clk_wzrd->clk_data.clk_num = ARRAY_SIZE(clk_wzrd->clkout);
752 of_clk_add_provider(np, of_clk_src_onecell_get, &clk_wzrd->clk_data);
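Once all outputs exist, they are exposed to consumers through the onecell provider: clk_data.clks points at the clkout array and of_clk_src_onecell_get() resolves #clock-cells = <1> phandle arguments by index. A sketch, assuming clk_data is a struct clk_onecell_data as the field names suggest:

#include <linux/clk-provider.h>
#include <linux/kernel.h>
#include <linux/of.h>

static int example_add_provider(struct device_node *np,
                                struct clk_wzrd *clk_wzrd)
{
        /* Consumers reference outputs as <&clk_wiz N>, N = array index. */
        clk_wzrd->clk_data.clks = clk_wzrd->clkout;
        clk_wzrd->clk_data.clk_num = ARRAY_SIZE(clk_wzrd->clkout);

        return of_clk_add_provider(np, of_clk_src_onecell_get,
                                   &clk_wzrd->clk_data);
}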
754 if (clk_wzrd->speed_grade) {
755 clk_wzrd->nb.notifier_call = clk_wzrd_clk_notifier;
757 ret = clk_notifier_register(clk_wzrd->clk_in1,
758 &clk_wzrd->nb);
763 ret = clk_notifier_register(clk_wzrd->axi_clk, &clk_wzrd->nb);
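Rate policing is only armed when a valid speed grade was found: the same notifier_block is registered on both clk_in1 and s_axi_aclk, and the remove path below unregisters it from both. A sketch of that pairing; treating registration failure as a warning mirrors the listing's flow, the message text is illustrative:

#include <linux/clk.h>
#include <linux/device.h>
#include <linux/notifier.h>

static int example_arm_notifiers(struct device *dev, struct clk_wzrd *clk_wzrd)
{
        int ret;

        if (!clk_wzrd->speed_grade)
                return 0;       /* no limit table, nothing to police */

        clk_wzrd->nb.notifier_call = clk_wzrd_clk_notifier;

        ret = clk_notifier_register(clk_wzrd->clk_in1, &clk_wzrd->nb);
        if (ret)
                dev_warn(dev, "unable to register clk_in1 notifier\n");

        ret = clk_notifier_register(clk_wzrd->axi_clk, &clk_wzrd->nb);
        if (ret)
                dev_warn(dev, "unable to register s_axi_aclk notifier\n");

        return 0;
}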
772 clk_unregister(clk_wzrd->clks_internal[1]);
774 clk_unregister(clk_wzrd->clks_internal[0]);
776 clk_disable_unprepare(clk_wzrd->axi_clk);
784 struct clk_wzrd *clk_wzrd = platform_get_drvdata(pdev);
789 clk_unregister(clk_wzrd->clkout[i]);
791 clk_unregister(clk_wzrd->clks_internal[i]);
793 if (clk_wzrd->speed_grade) {
794 clk_notifier_unregister(clk_wzrd->axi_clk, &clk_wzrd->nb);
795 clk_notifier_unregister(clk_wzrd->clk_in1, &clk_wzrd->nb);
798 clk_disable_unprepare(clk_wzrd->axi_clk);
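Removal mirrors probe in reverse: drop the clock provider, unregister the output and internal clocks, detach the notifiers if they were armed, and finally gate the AXI clock. A sketch of such a remove callback (returning int as in older platform drivers); iterating over the full arrays is an assumption about their sizes:

#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/kernel.h>
#include <linux/of.h>
#include <linux/platform_device.h>

static int example_remove(struct platform_device *pdev)
{
        struct clk_wzrd *clk_wzrd = platform_get_drvdata(pdev);
        struct device_node *np = pdev->dev.of_node;
        int i;

        of_clk_del_provider(np);

        for (i = 0; i < ARRAY_SIZE(clk_wzrd->clkout); i++)
                clk_unregister(clk_wzrd->clkout[i]);
        for (i = 0; i < ARRAY_SIZE(clk_wzrd->clks_internal); i++)
                clk_unregister(clk_wzrd->clks_internal[i]);

        if (clk_wzrd->speed_grade) {
                clk_notifier_unregister(clk_wzrd->axi_clk, &clk_wzrd->nb);
                clk_notifier_unregister(clk_wzrd->clk_in1, &clk_wzrd->nb);
        }

        clk_disable_unprepare(clk_wzrd->axi_clk);

        return 0;
}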