Lines matching references to 'div' (each entry: source line number, matching code, enclosing function, reference kind: argument, local, or member)

61 unsigned long div) in ccu_div_lock_delay_ns() argument
63 u64 ns = 4ULL * (div ?: 1) * NSEC_PER_SEC; in ccu_div_lock_delay_ns()
71 unsigned long div) in ccu_div_calc_freq() argument
73 return ref_clk / (div ?: 1); in ccu_div_calc_freq()
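
Only the div-referencing lines of the two helpers above are listed. The sketch below is a hedged reconstruction of how they presumably fit together; the do_div() by ref_clk and the exact return types are assumptions, since only lines 61-73 are visible.

/* Kernel context assumed: linux/math64.h (do_div), linux/time64.h (NSEC_PER_SEC). */
static u64 ccu_div_lock_delay_ns(unsigned long ref_clk, unsigned long div)
{
	u64 ns = 4ULL * (div ?: 1) * NSEC_PER_SEC;	/* four divided-clock periods, in ns */

	do_div(ns, ref_clk);				/* assumed: scale by the reference clock rate */

	return ns;
}

static unsigned long ccu_div_calc_freq(unsigned long ref_clk, unsigned long div)
{
	return ref_clk / (div ?: 1);			/* a zero divider is treated as 1 */
}

With a 25 MHz reference clock and a divider of 8 this evaluates to 1280 ns, i.e. four periods of the divided clock.
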
76 static int ccu_div_var_update_clkdiv(struct ccu_div *div, in ccu_div_var_update_clkdiv() argument
87 if (div->features & CCU_DIV_LOCK_SHIFTED) in ccu_div_var_update_clkdiv()
92 regmap_update_bits(div->sys_regs, div->reg_ctl, in ccu_div_var_update_clkdiv()
102 regmap_read(div->sys_regs, div->reg_ctl, &val); in ccu_div_var_update_clkdiv()
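
The listing shows only three lines of ccu_div_var_update_clkdiv(); below is a minimal sketch of the update-then-poll flow those lines imply. The update/lock bit names, the retry bound, the ndelay()-based polling and the errno are assumptions.

static int ccu_div_var_update_clkdiv(struct ccu_div *div,
				     unsigned long parent_rate,
				     unsigned long divider)
{
	unsigned long nd;
	u32 val = 0;
	u32 lock;
	int count;

	nd = ccu_div_lock_delay_ns(parent_rate, divider);

	/* Line 87: some dividers report the lock flag at a shifted position */
	if (div->features & CCU_DIV_LOCK_SHIFTED)
		lock = CCU_DIV_CTL_LOCK_SHIFTED;	/* assumed bit name */
	else
		lock = CCU_DIV_CTL_LOCK_NORMAL;		/* assumed bit name */

	/* Line 92: ask the hardware to latch the new divider value */
	regmap_update_bits(div->sys_regs, div->reg_ctl,
			   CCU_DIV_CTL_SET_CLKDIV, CCU_DIV_CTL_SET_CLKDIV);

	/* Line 102: poll the control register until the divider reports lock */
	count = CCU_DIV_LOCK_CHECK_RETRIES;		/* assumed retry bound */
	do {
		ndelay(nd);
		regmap_read(div->sys_regs, div->reg_ctl, &val);
		if (val & lock)
			return 0;
	} while (--count);

	return -EBUSY;
}
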
113 struct ccu_div *div = to_ccu_div(hw); in ccu_div_var_enable() local
123 regmap_read(div->sys_regs, div->reg_ctl, &val); in ccu_div_var_enable()
127 spin_lock_irqsave(&div->lock, flags); in ccu_div_var_enable()
128 ret = ccu_div_var_update_clkdiv(div, clk_hw_get_rate(parent_hw), in ccu_div_var_enable()
129 ccu_div_get(div->mask, val)); in ccu_div_var_enable()
131 regmap_update_bits(div->sys_regs, div->reg_ctl, in ccu_div_var_enable()
133 spin_unlock_irqrestore(&div->lock, flags); in ccu_div_var_enable()
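
A hedged reconstruction of ccu_div_var_enable() around the listed lines 113-133; the early "already enabled" short-cut, the parent sanity check and the error handling are assumptions.

static int ccu_div_var_enable(struct clk_hw *hw)
{
	struct clk_hw *parent_hw = clk_hw_get_parent(hw);
	struct ccu_div *div = to_ccu_div(hw);
	unsigned long flags;
	u32 val = 0;
	int ret;

	if (!parent_hw)				/* assumed sanity check */
		return -EINVAL;

	regmap_read(div->sys_regs, div->reg_ctl, &val);
	if (val & CCU_DIV_CTL_EN)		/* assumed: nothing to do if already ungated */
		return 0;

	spin_lock_irqsave(&div->lock, flags);
	/* Re-latch the current divider at the parent rate before ungating */
	ret = ccu_div_var_update_clkdiv(div, clk_hw_get_rate(parent_hw),
					ccu_div_get(div->mask, val));
	if (!ret)
		regmap_update_bits(div->sys_regs, div->reg_ctl,
				   CCU_DIV_CTL_EN, CCU_DIV_CTL_EN);
	spin_unlock_irqrestore(&div->lock, flags);

	return ret;
}
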
142 struct ccu_div *div = to_ccu_div(hw); in ccu_div_gate_enable() local
145 spin_lock_irqsave(&div->lock, flags); in ccu_div_gate_enable()
146 regmap_update_bits(div->sys_regs, div->reg_ctl, in ccu_div_gate_enable()
148 spin_unlock_irqrestore(&div->lock, flags); in ccu_div_gate_enable()
155 struct ccu_div *div = to_ccu_div(hw); in ccu_div_gate_disable() local
158 spin_lock_irqsave(&div->lock, flags); in ccu_div_gate_disable()
159 regmap_update_bits(div->sys_regs, div->reg_ctl, CCU_DIV_CTL_EN, 0); in ccu_div_gate_disable()
160 spin_unlock_irqrestore(&div->lock, flags); in ccu_div_gate_disable()
165 struct ccu_div *div = to_ccu_div(hw); in ccu_div_gate_is_enabled() local
168 regmap_read(div->sys_regs, div->reg_ctl, &val); in ccu_div_gate_is_enabled()
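
The three gate callbacks (lines 142-168) share one pattern: a spinlock-protected regmap_update_bits() on the CCU_DIV_CTL_EN bit, plus a lockless read for is_enabled. Disable (line 159) is listed in full; the other two presumably look like the sketch below, where the mask/value arguments and the return statements are assumptions.

static int ccu_div_gate_enable(struct clk_hw *hw)
{
	struct ccu_div *div = to_ccu_div(hw);
	unsigned long flags;

	spin_lock_irqsave(&div->lock, flags);
	regmap_update_bits(div->sys_regs, div->reg_ctl,
			   CCU_DIV_CTL_EN, CCU_DIV_CTL_EN);	/* set the gate bit */
	spin_unlock_irqrestore(&div->lock, flags);

	return 0;
}

static int ccu_div_gate_is_enabled(struct clk_hw *hw)
{
	struct ccu_div *div = to_ccu_div(hw);
	u32 val = 0;

	regmap_read(div->sys_regs, div->reg_ctl, &val);

	return !!(val & CCU_DIV_CTL_EN);
}
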
176 struct ccu_div *div = to_ccu_div(hw); in ccu_div_var_recalc_rate() local
180 regmap_read(div->sys_regs, div->reg_ctl, &val); in ccu_div_var_recalc_rate()
181 divider = ccu_div_get(div->mask, val); in ccu_div_var_recalc_rate()
200 struct ccu_div *div = to_ccu_div(hw); in ccu_div_var_round_rate() local
203 divider = ccu_div_var_calc_divider(rate, *parent_rate, div->mask); in ccu_div_var_round_rate()
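
recalc_rate (lines 176-181) reads the CLKDIV field back and feeds it to ccu_div_calc_freq(); round_rate (lines 200-203) goes through ccu_div_var_calc_divider(), whose body is not part of the listing. A sketch follows; the rounding policy and the clamping in the calc helper are assumptions.

static unsigned long ccu_div_var_recalc_rate(struct clk_hw *hw,
					     unsigned long parent_rate)
{
	struct ccu_div *div = to_ccu_div(hw);
	unsigned long divider;
	u32 val = 0;

	regmap_read(div->sys_regs, div->reg_ctl, &val);
	divider = ccu_div_get(div->mask, val);		/* extract the CLKDIV field */

	return ccu_div_calc_freq(parent_rate, divider);
}

/* Assumed shape of the divider calculation shared by round_rate/set_rate */
static unsigned long ccu_div_var_calc_divider(unsigned long rate,
					      unsigned long parent_rate,
					      u32 mask)
{
	unsigned long divider = parent_rate / (rate ?: 1);

	return clamp_t(unsigned long, divider, 1, CCU_DIV_CLKDIV_MAX(mask));
}
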
216 struct ccu_div *div = to_ccu_div(hw); in ccu_div_var_set_rate_slow() local
221 divider = ccu_div_var_calc_divider(rate, parent_rate, div->mask); in ccu_div_var_set_rate_slow()
222 if (divider == 1 && div->features & CCU_DIV_SKIP_ONE) { in ccu_div_var_set_rate_slow()
224 } else if (div->features & CCU_DIV_SKIP_ONE_TO_THREE) { in ccu_div_var_set_rate_slow()
231 val = ccu_div_prep(div->mask, divider); in ccu_div_var_set_rate_slow()
233 spin_lock_irqsave(&div->lock, flags); in ccu_div_var_set_rate_slow()
234 regmap_update_bits(div->sys_regs, div->reg_ctl, div->mask, val); in ccu_div_var_set_rate_slow()
235 ret = ccu_div_var_update_clkdiv(div, parent_rate, divider); in ccu_div_var_set_rate_slow()
236 spin_unlock_irqrestore(&div->lock, flags); in ccu_div_var_set_rate_slow()
250 struct ccu_div *div = to_ccu_div(hw); in ccu_div_var_set_rate_fast() local
254 divider = ccu_div_var_calc_divider(rate, parent_rate, div->mask); in ccu_div_var_set_rate_fast()
255 val = ccu_div_prep(div->mask, divider); in ccu_div_var_set_rate_fast()
261 spin_lock_irqsave(&div->lock, flags); in ccu_div_var_set_rate_fast()
262 regmap_update_bits(div->sys_regs, div->reg_ctl, in ccu_div_var_set_rate_fast()
263 div->mask | CCU_DIV_CTL_EN, val); in ccu_div_var_set_rate_fast()
264 spin_unlock_irqrestore(&div->lock, flags); in ccu_div_var_set_rate_fast()
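
Lines 216-236 (slow path) and 250-264 (fast path) differ mainly in whether the divider change has to wait for the lock flag: the slow path skips programming under CCU_DIV_SKIP_ONE and CCU_DIV_SKIP_ONE_TO_THREE and then calls ccu_div_var_update_clkdiv(), while the fast path writes the field and the enable bit in one go. A sketch of the fast path; whether the enable bit ends up set in val cannot be decided from the listing alone (lines not referencing div would not appear above), so the sketch writes val as prepared.

static int ccu_div_var_set_rate_fast(struct clk_hw *hw, unsigned long rate,
				     unsigned long parent_rate)
{
	struct ccu_div *div = to_ccu_div(hw);
	unsigned long flags, divider;
	u32 val;

	divider = ccu_div_var_calc_divider(rate, parent_rate, div->mask);
	val = ccu_div_prep(div->mask, divider);		/* shift the divider into the CLKDIV field */

	/*
	 * Lines 261-264: the divider and the enable bit are updated in a
	 * single write and no lock polling is done (hence the "fast" variant).
	 */
	spin_lock_irqsave(&div->lock, flags);
	regmap_update_bits(div->sys_regs, div->reg_ctl,
			   div->mask | CCU_DIV_CTL_EN, val);
	spin_unlock_irqrestore(&div->lock, flags);

	return 0;
}
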
272 struct ccu_div *div = to_ccu_div(hw); in ccu_div_fixed_recalc_rate() local
274 return ccu_div_calc_freq(parent_rate, div->divider); in ccu_div_fixed_recalc_rate()
280 struct ccu_div *div = to_ccu_div(hw); in ccu_div_fixed_round_rate() local
282 return ccu_div_calc_freq(*parent_rate, div->divider); in ccu_div_fixed_round_rate()
291 int ccu_div_reset_domain(struct ccu_div *div) in ccu_div_reset_domain() argument
295 if (!div || !(div->features & CCU_DIV_RESET_DOMAIN)) in ccu_div_reset_domain()
298 spin_lock_irqsave(&div->lock, flags); in ccu_div_reset_domain()
299 regmap_update_bits(div->sys_regs, div->reg_ctl, in ccu_div_reset_domain()
301 spin_unlock_irqrestore(&div->lock, flags); in ccu_div_reset_domain()
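
ccu_div_reset_domain() (lines 291-301) only acts on dividers flagged with CCU_DIV_RESET_DOMAIN. The reset bit name, the errno and the settle delay in the sketch below are assumptions, since line 300 does not appear in the listing.

int ccu_div_reset_domain(struct ccu_div *div)
{
	unsigned long flags;

	if (!div || !(div->features & CCU_DIV_RESET_DOMAIN))
		return -EINVAL;

	spin_lock_irqsave(&div->lock, flags);
	regmap_update_bits(div->sys_regs, div->reg_ctl,
			   CCU_DIV_CTL_RST, CCU_DIV_CTL_RST);	/* assumed self-clearing reset bit */
	spin_unlock_irqrestore(&div->lock, flags);

	/* Assumed: let the domain settle before the caller proceeds */
	udelay(CCU_DIV_RST_DELAY_US);

	return 0;
}
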
312 struct ccu_div *div; member
342 struct ccu_div *div = bit->div; in ccu_div_dbgfs_bit_set() local
345 spin_lock_irqsave(&div->lock, flags); in ccu_div_dbgfs_bit_set()
346 regmap_update_bits(div->sys_regs, div->reg_ctl, in ccu_div_dbgfs_bit_set()
348 spin_unlock_irqrestore(&div->lock, flags); in ccu_div_dbgfs_bit_set()
355 struct ccu_div *div = priv; in ccu_div_dbgfs_var_clkdiv_set() local
360 CCU_DIV_CLKDIV_MAX(div->mask)); in ccu_div_dbgfs_var_clkdiv_set()
361 data = ccu_div_prep(div->mask, val); in ccu_div_dbgfs_var_clkdiv_set()
363 spin_lock_irqsave(&div->lock, flags); in ccu_div_dbgfs_var_clkdiv_set()
364 regmap_update_bits(div->sys_regs, div->reg_ctl, div->mask, data); in ccu_div_dbgfs_var_clkdiv_set()
365 spin_unlock_irqrestore(&div->lock, flags); in ccu_div_dbgfs_var_clkdiv_set()
383 struct ccu_div *div = bit->div; in ccu_div_dbgfs_bit_get() local
386 regmap_read(div->sys_regs, div->reg_ctl, &data); in ccu_div_dbgfs_bit_get()
396 struct ccu_div *div = priv; in ccu_div_dbgfs_var_clkdiv_get() local
399 regmap_read(div->sys_regs, div->reg_ctl, &data); in ccu_div_dbgfs_var_clkdiv_get()
400 *val = ccu_div_get(div->mask, data); in ccu_div_dbgfs_var_clkdiv_get()
409 struct ccu_div *div = priv; in ccu_div_dbgfs_fixed_clkdiv_get() local
411 *val = div->divider; in ccu_div_dbgfs_fixed_clkdiv_get()
420 struct ccu_div *div = to_ccu_div(hw); in ccu_div_var_debug_init() local
425 num += !!(div->flags & CLK_SET_RATE_GATE) + in ccu_div_var_debug_init()
426 !!(div->features & CCU_DIV_RESET_DOMAIN); in ccu_div_var_debug_init()
434 if (!(div->flags & CLK_SET_RATE_GATE) && in ccu_div_var_debug_init()
439 if (!(div->features & CCU_DIV_RESET_DOMAIN) && in ccu_div_var_debug_init()
445 bits[didx].div = div; in ccu_div_var_debug_init()
447 if (div->features & CCU_DIV_LOCK_SHIFTED && in ccu_div_var_debug_init()
459 div, &ccu_div_dbgfs_var_clkdiv_fops); in ccu_div_var_debug_init()
464 struct ccu_div *div = to_ccu_div(hw); in ccu_div_gate_debug_init() local
472 bit->div = div; in ccu_div_gate_debug_init()
476 debugfs_create_file_unsafe("div_clkdiv", 0400, dentry, div, in ccu_div_gate_debug_init()
482 struct ccu_div *div = to_ccu_div(hw); in ccu_div_fixed_debug_init() local
484 debugfs_create_file_unsafe("div_clkdiv", 0400, dentry, div, in ccu_div_fixed_debug_init()
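
The debugfs helpers (lines 312-484) expose individual control bits and the CLKDIV field; lines 355-365 and 396-400 show most of the clkdiv accessors. A sketch of that pair wired up with DEFINE_DEBUGFS_ATTRIBUTE(); the lower clamp-bound macro name is an assumption, since only the upper bound on line 360 is visible.

static int ccu_div_dbgfs_var_clkdiv_get(void *priv, u64 *val)
{
	struct ccu_div *div = priv;
	u32 data = 0;

	regmap_read(div->sys_regs, div->reg_ctl, &data);
	*val = ccu_div_get(div->mask, data);

	return 0;
}

static int ccu_div_dbgfs_var_clkdiv_set(void *priv, u64 val)
{
	struct ccu_div *div = priv;
	unsigned long flags;
	u32 data;

	val = clamp_t(u64, val, CCU_DIV_CLKDIV_MIN,	/* assumed lower-bound macro */
		      CCU_DIV_CLKDIV_MAX(div->mask));
	data = ccu_div_prep(div->mask, val);

	spin_lock_irqsave(&div->lock, flags);
	regmap_update_bits(div->sys_regs, div->reg_ctl, div->mask, data);
	spin_unlock_irqrestore(&div->lock, flags);

	return 0;
}

DEFINE_DEBUGFS_ATTRIBUTE(ccu_div_dbgfs_var_clkdiv_fops,
			 ccu_div_dbgfs_var_clkdiv_get,
			 ccu_div_dbgfs_var_clkdiv_set, "%llu\n");
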
534 struct ccu_div *div; in ccu_div_hw_register() local
540 div = kzalloc(sizeof(*div), GFP_KERNEL); in ccu_div_hw_register()
541 if (!div) in ccu_div_hw_register()
549 div->hw.init = &hw_init; in ccu_div_hw_register()
550 div->id = div_init->id; in ccu_div_hw_register()
551 div->reg_ctl = div_init->base + CCU_DIV_CTL; in ccu_div_hw_register()
552 div->sys_regs = div_init->sys_regs; in ccu_div_hw_register()
553 div->flags = div_init->flags; in ccu_div_hw_register()
554 div->features = div_init->features; in ccu_div_hw_register()
555 spin_lock_init(&div->lock); in ccu_div_hw_register()
565 div->mask = CCU_DIV_CTL_CLKDIV_MASK(div_init->width); in ccu_div_hw_register()
568 div->divider = div_init->divider; in ccu_div_hw_register()
571 div->divider = div_init->divider; in ccu_div_hw_register()
585 ret = of_clk_hw_register(div_init->np, &div->hw); in ccu_div_hw_register()
589 return div; in ccu_div_hw_register()
592 kfree(div); in ccu_div_hw_register()
597 void ccu_div_hw_unregister(struct ccu_div *div) in ccu_div_hw_unregister() argument
599 clk_hw_unregister(&div->hw); in ccu_div_hw_unregister()
601 kfree(div); in ccu_div_hw_unregister()
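
ccu_div_hw_register() (lines 534-592) allocates the descriptor, copies the init data in, derives either the CLKDIV field mask or the fixed ratio, and registers the clk_hw; ccu_div_hw_unregister() reverses that. The sketch below assumes a ccu_div_init_data initializer with a type discriminator, and the clk_init_data name/ops/parent setup the listing elides is only hinted at in a comment.

struct ccu_div *ccu_div_hw_register(const struct ccu_div_init_data *div_init)
{
	struct clk_init_data hw_init = {};
	struct ccu_div *div;
	int ret;

	if (!div_init)
		return ERR_PTR(-EINVAL);

	div = kzalloc(sizeof(*div), GFP_KERNEL);
	if (!div)
		return ERR_PTR(-ENOMEM);

	/* Lines 549-555: copy the static init data into the runtime descriptor */
	div->hw.init = &hw_init;
	div->id = div_init->id;
	div->reg_ctl = div_init->base + CCU_DIV_CTL;
	div->sys_regs = div_init->sys_regs;
	div->flags = div_init->flags;
	div->features = div_init->features;
	spin_lock_init(&div->lock);

	/* Lines 565-571: variable dividers get a field mask, the others a fixed ratio */
	if (div_init->type == CCU_DIV_VAR)		/* assumed discriminator */
		div->mask = CCU_DIV_CTL_CLKDIV_MASK(div_init->width);
	else
		div->divider = div_init->divider;

	/* hw_init.name, .ops and the parent data are set up here (elided in the listing) */

	ret = of_clk_hw_register(div_init->np, &div->hw);
	if (ret)
		goto err_free_div;

	return div;

err_free_div:
	kfree(div);
	return ERR_PTR(ret);
}

void ccu_div_hw_unregister(struct ccu_div *div)
{
	clk_hw_unregister(&div->hw);
	kfree(div);
}
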