2 * Copyright (c) 2017 Emmanuel Vadot <manu@freebsd.org>
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
14 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
15 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
16 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
17 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
18 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
19 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
20 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
21 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
22 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
23 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
29 #include <sys/cdefs.h>
30 __FBSDID("$FreeBSD$");
32 #include <sys/param.h>
33 #include <sys/systm.h>
36 #include <dev/extres/clk/clk.h>
38 #include <arm/allwinner/clkng/aw_clk.h>
39 #include <arm/allwinner/clkng/aw_clk_nm.h>
41 #include "clkdev_if.h"
44 * clknode for clocks matching the formula :
	/* N and M divider factors: output = input / n / m. */
	struct aw_clk_factor	m;
	struct aw_clk_factor	n;
	/* Optional pre-divider, applied only when its condition matches. */
	struct aw_clk_factor	prediv;
	/* Fractional-mode parameters (fixed freq0/freq1 outputs). */
	struct aw_clk_frac	frac;
	/* Max polls of the lock bit after a rate change (AW_CLK_HAS_LOCK). */
	uint32_t	lock_retries;
/*
 * Register-access helpers: forward to the clock's parent device through
 * the clkdev interface, so the device's register space and lock are used.
 */
#define WRITE4(_clk, off, val)						\
	CLKDEV_WRITE_4(clknode_get_device(_clk), off, val)
#define READ4(_clk, off, val)						\
	CLKDEV_READ_4(clknode_get_device(_clk), off, val)
#define DEVICE_LOCK(_clk)						\
	CLKDEV_DEVICE_LOCK(clknode_get_device(_clk))
#define DEVICE_UNLOCK(_clk)						\
	CLKDEV_DEVICE_UNLOCK(clknode_get_device(_clk))
/*
 * clknode init method: latch the hardware's currently-selected parent so
 * the clk framework starts from the boot-time mux setting.
 */
aw_clk_nm_init(struct clknode *clk, device_t dev)
	struct aw_clk_nm_sc *sc;
	sc = clknode_get_softc(clk);
	/* Only clocks with a mux have a meaningful parent index. */
	if ((sc->flags & AW_CLK_HAS_MUX) != 0) {
		READ4(clk, sc->offset, &val);
		/* Extract the parent index from the mux field. */
		idx = (val & sc->mux_mask) >> sc->mux_shift;
	/* Record the active parent with the clk framework. */
	clknode_init_parent_idx(clk, idx);
/*
 * clknode gate method: set or clear the gate bit in the clock register.
 * Clocks without a gate (AW_CLK_HAS_GATE unset) reject the request.
 */
aw_clk_nm_set_gate(struct clknode *clk, bool enable)
	struct aw_clk_nm_sc *sc;
	sc = clknode_get_softc(clk);
	if ((sc->flags & AW_CLK_HAS_GATE) == 0)
		READ4(clk, sc->offset, &val);
		/* enable: set the gate bit ... */
		val |= (1 << sc->gate_shift);
		/* ... otherwise clear it. */
		val &= ~(1 << sc->gate_shift);
	WRITE4(clk, sc->offset, val);
/*
 * clknode mux method: select parent `index` by rewriting the mux field.
 * Clocks without a mux (AW_CLK_HAS_MUX unset) reject the request.
 */
aw_clk_nm_set_mux(struct clknode *clk, int index)
	struct aw_clk_nm_sc *sc;
	sc = clknode_get_softc(clk);
	if ((sc->flags & AW_CLK_HAS_MUX) == 0)
		READ4(clk, sc->offset, &val);
		/* Clear the mux field, then write the new parent index. */
		val &= ~sc->mux_mask;
		val |= index << sc->mux_shift;
		WRITE4(clk, sc->offset, val);
142 aw_clk_nm_find_best(struct aw_clk_nm_sc *sc, uint64_t fparent, uint64_t *fout,
143 uint32_t *factor_n, uint32_t *factor_m)
146 uint32_t m, n, max_m, max_n, min_m, min_n;
148 *factor_n = *factor_m = 0;
150 max_m = aw_clk_factor_get_max(&sc->m);
151 max_n = aw_clk_factor_get_max(&sc->n);
152 min_m = aw_clk_factor_get_min(&sc->m);
153 min_n = aw_clk_factor_get_min(&sc->n);
155 for (m = min_m; m <= max_m; ) {
156 for (n = min_m; n <= max_n; ) {
157 cur = fparent / n / m;
158 if ((*fout - cur) < (*fout - best)) {
164 if ((sc->n.flags & AW_CLK_FACTOR_POWER_OF_TWO) != 0)
169 if ((sc->m.flags & AW_CLK_FACTOR_POWER_OF_TWO) != 0)
/*
 * clknode set_freq method: choose the best parent (if reparenting is
 * allowed) and the best N/M pair for *fout, honor the ROUND_UP/ROUND_DOWN
 * and DRYRUN flags, then program the divider (or fractional-mode) fields
 * and optionally wait for the PLL lock bit.
 */
aw_clk_nm_set_freq(struct clknode *clk, uint64_t fparent, uint64_t *fout,
    int flags, int *stop)
	struct aw_clk_nm_sc *sc;
	struct clknode *p_clk;
	const char **p_names;
	uint64_t cur, best, best_frac;
	uint32_t val, m, n, best_m, best_n;
	int p_idx, best_parent, retry;
	sc = clknode_get_softc(clk);
	best = best_frac = cur = 0;
	/* If reparenting is allowed, try every parent for the closest rate. */
	if ((sc->flags & AW_CLK_REPARENT) != 0) {
		p_names = clknode_get_parent_names(clk);
		for (p_idx = 0; p_idx != clknode_get_parents_num(clk); p_idx++) {
			p_clk = clknode_find_by_name(p_names[p_idx]);
			clknode_get_freq(p_clk, &fparent);
			cur = aw_clk_nm_find_best(sc, fparent, fout, &n, &m);
			/* Unsigned deltas: candidates above *fout wrap huge. */
			if ((*fout - cur) < (*fout - best)) {
	/* Current parent and its frequency. */
	p_idx = clknode_get_parent_idx(clk);
	p_clk = clknode_get_parent(clk);
	clknode_get_freq(p_clk, &fparent);
	/* Fractional mode can hit freq0/freq1 exactly; prefer it then. */
	if (sc->flags & AW_CLK_HAS_FRAC &&
	    (*fout == sc->frac.freq0 || *fout == sc->frac.freq1))
		best = best_frac = *fout;
		best = aw_clk_nm_find_best(sc, fparent, fout,
	/* Dry run: report the rate that would be set, change nothing. */
	if ((flags & CLK_SET_DRYRUN) != 0) {
	/* Best rate is below target but caller forbids rounding down. */
	if ((best < *fout) &&
	    ((flags & CLK_SET_ROUND_DOWN) == 0)) {
	/* Best rate is above target but caller forbids rounding up. */
	if ((best > *fout) &&
	    ((flags & CLK_SET_ROUND_UP) == 0)) {
	if (p_idx != best_parent)
		clknode_set_parent_by_idx(clk, best_parent);
	READ4(clk, sc->offset, &val);
	/* Fractional mode: mode_sel cleared selects fractional output. */
	if (best_frac != 0) {
		val &= ~sc->frac.mode_sel;
		/* freq_sel picks between the two fixed fractional rates. */
		if (best_frac == sc->frac.freq0)
			val &= ~sc->frac.freq_sel;
			val |= sc->frac.freq_sel;
		/* Integer mode: encode and program the N and M fields. */
		n = aw_clk_factor_get_value(&sc->n, best_n);
		m = aw_clk_factor_get_value(&sc->m, best_m);
		val |= n << sc->n.shift;
		val |= m << sc->m.shift;
	WRITE4(clk, sc->offset, val);
	/* Poll the lock bit until set, up to lock_retries attempts. */
	if ((sc->flags & AW_CLK_HAS_LOCK) != 0) {
		for (retry = 0; retry < sc->lock_retries; retry++) {
			READ4(clk, sc->offset, &val);
			if ((val & (1 << sc->lock_shift)) != 0)
/*
 * clknode recalc method: derive the output frequency from the register.
 * In fractional mode (mode_sel bit clear) the output is one of the two
 * fixed rates; otherwise it is parent / prediv / n / m.
 */
aw_clk_nm_recalc(struct clknode *clk, uint64_t *freq)
	struct aw_clk_nm_sc *sc;
	uint32_t val, m, n, prediv;
	sc = clknode_get_softc(clk);
	READ4(clk, sc->offset, &val);
	/* mode_sel clear means the clock runs in fractional mode. */
	if (sc->flags & AW_CLK_HAS_FRAC && ((val & sc->frac.mode_sel) == 0)) {
		/* freq_sel picks between the two fixed fractional rates. */
		if (val & sc->frac.freq_sel)
			*freq = sc->frac.freq1;
			*freq = sc->frac.freq0;
		/* Integer mode: decode the divider fields. */
		m = aw_clk_get_factor(val, &sc->m);
		n = aw_clk_get_factor(val, &sc->n);
		if (sc->flags & AW_CLK_HAS_PREDIV)
			prediv = aw_clk_get_factor(val, &sc->prediv);
		/* *freq holds the parent frequency on entry here. */
		*freq = *freq / prediv / n / m;
/* clknode method table wiring the N/M implementations into the class. */
static clknode_method_t aw_nm_clknode_methods[] = {
	/* Device interface */
	CLKNODEMETHOD(clknode_init, aw_clk_nm_init),
	CLKNODEMETHOD(clknode_set_gate, aw_clk_nm_set_gate),
	CLKNODEMETHOD(clknode_set_mux, aw_clk_nm_set_mux),
	CLKNODEMETHOD(clknode_recalc_freq, aw_clk_nm_recalc),
	CLKNODEMETHOD(clknode_set_freq, aw_clk_nm_set_freq),
/* Subclass of clknode_class with an aw_clk_nm_sc softc. */
DEFINE_CLASS_1(aw_nm_clknode, aw_nm_clknode_class, aw_nm_clknode_methods,
    sizeof(struct aw_clk_nm_sc), clknode_class);
323 aw_clk_nm_register(struct clkdom *clkdom, struct aw_clk_nm_def *clkdef)
326 struct aw_clk_nm_sc *sc;
328 clk = clknode_create(clkdom, &aw_nm_clknode_class, &clkdef->clkdef);
332 sc = clknode_get_softc(clk);
334 sc->offset = clkdef->offset;
336 sc->m.shift = clkdef->m.shift;
337 sc->m.width = clkdef->m.width;
338 sc->m.mask = ((1 << sc->m.width) - 1) << sc->m.shift;
339 sc->m.value = clkdef->m.value;
340 sc->m.flags = clkdef->m.flags;
342 sc->n.shift = clkdef->n.shift;
343 sc->n.width = clkdef->n.width;
344 sc->n.mask = ((1 << sc->n.width) - 1) << sc->n.shift;
345 sc->n.value = clkdef->n.value;
346 sc->n.flags = clkdef->n.flags;
348 sc->prediv.shift = clkdef->prediv.shift;
349 sc->prediv.width = clkdef->prediv.width;
350 sc->prediv.mask = ((1 << sc->prediv.width) - 1) << sc->prediv.shift;
351 sc->prediv.value = clkdef->prediv.value;
352 sc->prediv.flags = clkdef->prediv.flags;
353 sc->prediv.cond_shift = clkdef->prediv.cond_shift;
354 if (clkdef->prediv.cond_width != 0)
355 sc->prediv.cond_mask = ((1 << clkdef->prediv.cond_width) - 1) << sc->prediv.shift;
357 sc->prediv.cond_mask = clkdef->prediv.cond_mask;
358 sc->prediv.cond_value = clkdef->prediv.cond_value;
360 sc->frac.freq0 = clkdef->frac.freq0;
361 sc->frac.freq1 = clkdef->frac.freq1;
362 sc->frac.mode_sel = 1 << clkdef->frac.mode_sel;
363 sc->frac.freq_sel = 1 << clkdef->frac.freq_sel;
365 sc->mux_shift = clkdef->mux_shift;
366 sc->mux_mask = ((1 << clkdef->mux_width) - 1) << sc->mux_shift;
368 sc->gate_shift = clkdef->gate_shift;
370 sc->lock_shift = clkdef->lock_shift;
371 sc->lock_retries = clkdef->lock_retries;
373 sc->flags = clkdef->flags;
375 clknode_register(clkdom, clk);