2 * Copyright (c) 2017 Emmanuel Vadot <manu@freebsd.org>
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
14 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
15 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
16 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
17 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
18 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
19 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
20 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
21 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
22 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
28 #include <sys/cdefs.h>
29 __FBSDID("$FreeBSD$");
31 #include <sys/param.h>
32 #include <sys/systm.h>
35 #include <dev/extres/clk/clk.h>
37 #include <arm/allwinner/clkng/aw_clk.h>
38 #include <arm/allwinner/clkng/aw_clk_nm.h>
40 #include "clkdev_if.h"
43 * clknode for clocks matching the formula :
/*
 * Per-node softc fields (fragment — enclosing struct header and the
 * remaining members are elided in this excerpt).
 */
52 struct aw_clk_factor m;	/* M divider register-field descriptor */
53 struct aw_clk_factor n;	/* N divider register-field descriptor */
54 struct aw_clk_factor prediv;	/* optional conditional pre-divider */
60 uint32_t lock_retries;	/* polls of the lock bit after a freq change */
/*
 * Convenience wrappers: all register access and serialization goes
 * through the parent clock device's clkdev_if methods.
 */
65 #define WRITE4(_clk, off, val) \
66 CLKDEV_WRITE_4(clknode_get_device(_clk), off, val)
67 #define READ4(_clk, off, val) \
68 CLKDEV_READ_4(clknode_get_device(_clk), off, val)
69 #define DEVICE_LOCK(_clk) \
70 CLKDEV_DEVICE_LOCK(clknode_get_device(_clk))
71 #define DEVICE_UNLOCK(_clk) \
72 CLKDEV_DEVICE_UNLOCK(clknode_get_device(_clk))
/*
 * clknode init method: latch the currently selected parent.
 * If the clock has a mux, decode the active parent index from the mux
 * field of the control register; otherwise the node presumably has a
 * single parent (non-mux path elided in this excerpt — confirm against
 * the full file). Fragment: braces/locking lines are elided.
 */
75 aw_clk_nm_init(struct clknode *clk, device_t dev)
77 struct aw_clk_nm_sc *sc;
80 sc = clknode_get_softc(clk);
83 if ((sc->flags & AW_CLK_HAS_MUX) != 0) {
85 READ4(clk, sc->offset, &val);
88 idx = (val & sc->mux_mask) >> sc->mux_shift;
91 clknode_init_parent_idx(clk, idx);
/*
 * clknode set_gate method: set or clear the gate bit in the control
 * register. No-op (early return, elided here) when the clock has no
 * gate. Fragment: the read-modify-write is under the device lock in
 * lines elided from this excerpt — confirm against the full file.
 */
96 aw_clk_nm_set_gate(struct clknode *clk, bool enable)
98 struct aw_clk_nm_sc *sc;
101 sc = clknode_get_softc(clk);
103 if ((sc->flags & AW_CLK_HAS_GATE) == 0)
107 READ4(clk, sc->offset, &val);
109 val |= (1 << sc->gate_shift);
111 val &= ~(1 << sc->gate_shift);
112 WRITE4(clk, sc->offset, val);
/*
 * clknode set_mux method: select parent 'index' by rewriting the mux
 * field of the control register. Returns an error (elided) when the
 * clock has no mux. NOTE(review): 'index' is not range-checked against
 * mux_mask here — presumably validated by the clk framework; confirm.
 */
119 aw_clk_nm_set_mux(struct clknode *clk, int index)
121 struct aw_clk_nm_sc *sc;
124 sc = clknode_get_softc(clk);
126 if ((sc->flags & AW_CLK_HAS_MUX) == 0)
130 READ4(clk, sc->offset, &val);
131 val &= ~sc->mux_mask;
132 val |= index << sc->mux_shift;
133 WRITE4(clk, sc->offset, val);
/*
 * Exhaustively search the (n, m) divider space for the pair whose
 * fout = fparent / n / m is closest to the requested *fout.
 * Best n/m are returned via factor_n/factor_m; the loop increments
 * (elided here) step linearly or by doubling when the factor is
 * flagged AW_CLK_FACTOR_POWER_OF_TWO. Presumably returns the best
 * achievable frequency (return elided) — confirm against full file.
 */
140 aw_clk_nm_find_best(struct aw_clk_nm_sc *sc, uint64_t fparent, uint64_t *fout,
141 uint32_t *factor_n, uint32_t *factor_m)
144 uint32_t m, n, max_m, max_n, min_m, min_n;
146 *factor_n = *factor_m = 0;
148 max_m = aw_clk_factor_get_max(&sc->m);
149 max_n = aw_clk_factor_get_max(&sc->n);
150 min_m = aw_clk_factor_get_min(&sc->m);
151 min_n = aw_clk_factor_get_min(&sc->n);
153 for (m = min_m; m <= max_m; ) {
154 for (n = min_n; n <= max_n; ) {
155 cur = fparent / n / m;
156 if (clk_freq_diff(*fout, cur) <
157 clk_freq_diff(*fout, best)) {
163 if ((sc->n.flags & AW_CLK_FACTOR_POWER_OF_TWO) != 0)
168 if ((sc->m.flags & AW_CLK_FACTOR_POWER_OF_TWO) != 0)
/*
 * clknode set_freq method (fragment, many lines elided).
 *
 * 1. If AW_CLK_REPARENT, evaluate every possible parent with
 *    aw_clk_nm_find_best() and remember the parent giving the closest
 *    frequency; otherwise search only under the current parent.
 * 2. Honor CLK_SET_DRYRUN (report only) and reject results that would
 *    round in a direction the caller did not allow via
 *    CLK_SET_ROUND_DOWN / CLK_SET_ROUND_UP.
 * 3. Switch parent if a better one was found, then program the N and M
 *    fields of the control register.
 * 4. If AW_CLK_HAS_LOCK, poll the lock bit up to lock_retries times
 *    (timeout handling elided).
 */
178 aw_clk_nm_set_freq(struct clknode *clk, uint64_t fparent, uint64_t *fout,
179 int flags, int *stop)
181 struct aw_clk_nm_sc *sc;
182 struct clknode *p_clk;
183 const char **p_names;
185 uint32_t val, m, n, best_m, best_n;
186 int p_idx, best_parent, retry;
188 sc = clknode_get_softc(clk);
193 if ((sc->flags & AW_CLK_REPARENT) != 0) {
/* Try each candidate parent; keep the one minimizing the error. */
194 p_names = clknode_get_parent_names(clk);
195 for (p_idx = 0; p_idx != clknode_get_parents_num(clk); p_idx++) {
196 p_clk = clknode_find_by_name(p_names[p_idx]);
197 clknode_get_freq(p_clk, &fparent);
199 cur = aw_clk_nm_find_best(sc, fparent, fout, &n, &m);
200 if (clk_freq_diff(*fout, cur) <
201 clk_freq_diff(*fout, best)) {
/* Non-reparent path: search under the current parent only. */
209 p_idx = clknode_get_parent_idx(clk);
210 p_clk = clknode_get_parent(clk);
211 clknode_get_freq(p_clk, &fparent);
213 best = aw_clk_nm_find_best(sc, fparent, fout,
217 if ((flags & CLK_SET_DRYRUN) != 0) {
/* Rounding policy: fail unless the caller allowed this direction. */
223 if ((best < *fout) &&
224 ((flags & CLK_SET_ROUND_DOWN) == 0)) {
226 printf("best freq (%ju) < requested freq(%ju)\n",
230 if ((best > *fout) &&
231 ((flags & CLK_SET_ROUND_UP) == 0)) {
233 printf("best freq (%ju) > requested freq(%ju)\n",
238 if ((sc->flags & AW_CLK_REPARENT) != 0 && p_idx != best_parent)
239 clknode_set_parent_by_idx(clk, best_parent);
/* Program the chosen N/M factors into the control register. */
242 READ4(clk, sc->offset, &val);
244 n = aw_clk_factor_get_value(&sc->n, best_n);
245 m = aw_clk_factor_get_value(&sc->m, best_m);
248 val |= n << sc->n.shift;
249 val |= m << sc->m.shift;
251 WRITE4(clk, sc->offset, val);
/* Wait (bounded) for the hardware to report the clock as locked. */
254 if ((sc->flags & AW_CLK_HAS_LOCK) != 0) {
255 for (retry = 0; retry < sc->lock_retries; retry++) {
256 READ4(clk, sc->offset, &val);
257 if ((val & (1 << sc->lock_shift)) != 0)
/*
 * clknode recalc method: derive this clock's frequency from the parent
 * frequency in *freq by dividing by prediv (when present), N and M as
 * decoded from the control register. NOTE(review): the fallback that
 * sets prediv to 1 when AW_CLK_HAS_PREDIV is unset is in a line elided
 * from this excerpt — confirm it exists, otherwise prediv is read
 * uninitialized at the final division.
 */
270 aw_clk_nm_recalc(struct clknode *clk, uint64_t *freq)
272 struct aw_clk_nm_sc *sc;
273 uint32_t val, m, n, prediv;
275 sc = clknode_get_softc(clk);
278 READ4(clk, sc->offset, &val);
281 m = aw_clk_get_factor(val, &sc->m);
282 n = aw_clk_get_factor(val, &sc->n);
283 if (sc->flags & AW_CLK_HAS_PREDIV)
284 prediv = aw_clk_get_factor(val, &sc->prediv);
288 *freq = *freq / prediv / n / m;
/*
 * clknode method table binding the NM implementation to the clk
 * framework, and the class definition (softc sized for aw_clk_nm_sc,
 * inheriting from the generic clknode class). The table terminator is
 * elided in this excerpt.
 */
293 static clknode_method_t aw_nm_clknode_methods[] = {
294 /* Device interface */
295 CLKNODEMETHOD(clknode_init, aw_clk_nm_init),
296 CLKNODEMETHOD(clknode_set_gate, aw_clk_nm_set_gate),
297 CLKNODEMETHOD(clknode_set_mux, aw_clk_nm_set_mux),
298 CLKNODEMETHOD(clknode_recalc_freq, aw_clk_nm_recalc),
299 CLKNODEMETHOD(clknode_set_freq, aw_clk_nm_set_freq),
303 DEFINE_CLASS_1(aw_nm_clknode, aw_nm_clknode_class, aw_nm_clknode_methods,
304 sizeof(struct aw_clk_nm_sc), clknode_class);
307 aw_clk_nm_register(struct clkdom *clkdom, struct aw_clk_nm_def *clkdef)
310 struct aw_clk_nm_sc *sc;
312 clk = clknode_create(clkdom, &aw_nm_clknode_class, &clkdef->clkdef);
316 sc = clknode_get_softc(clk);
318 sc->offset = clkdef->offset;
320 sc->m.shift = clkdef->m.shift;
321 sc->m.width = clkdef->m.width;
322 sc->m.mask = ((1 << sc->m.width) - 1) << sc->m.shift;
323 sc->m.value = clkdef->m.value;
324 sc->m.flags = clkdef->m.flags;
326 sc->n.shift = clkdef->n.shift;
327 sc->n.width = clkdef->n.width;
328 sc->n.mask = ((1 << sc->n.width) - 1) << sc->n.shift;
329 sc->n.value = clkdef->n.value;
330 sc->n.flags = clkdef->n.flags;
332 sc->prediv.shift = clkdef->prediv.shift;
333 sc->prediv.width = clkdef->prediv.width;
334 sc->prediv.mask = ((1 << sc->prediv.width) - 1) << sc->prediv.shift;
335 sc->prediv.value = clkdef->prediv.value;
336 sc->prediv.flags = clkdef->prediv.flags;
337 sc->prediv.cond_shift = clkdef->prediv.cond_shift;
338 if (clkdef->prediv.cond_width != 0)
339 sc->prediv.cond_mask = ((1 << clkdef->prediv.cond_width) - 1) << sc->prediv.shift;
341 sc->prediv.cond_mask = clkdef->prediv.cond_mask;
342 sc->prediv.cond_value = clkdef->prediv.cond_value;
344 sc->mux_shift = clkdef->mux_shift;
345 sc->mux_mask = ((1 << clkdef->mux_width) - 1) << sc->mux_shift;
347 sc->gate_shift = clkdef->gate_shift;
349 sc->lock_shift = clkdef->lock_shift;
350 sc->lock_retries = clkdef->lock_retries;
352 sc->flags = clkdef->flags;
354 clknode_register(clkdom, clk);