// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <errno.h>
#include <log.h>
#include <malloc.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru_rk3128.h>
#include <asm/arch-rockchip/hardware.h>
#include <bitfield.h>
#include <dm/device-internal.h>
#include <dm/lists.h>
#include <dt-bindings/clock/rk3128-cru.h>
#include <linux/delay.h>
#include <linux/log2.h>

enum {
	VCO_MAX_HZ	= 2400U * 1000000,
	VCO_MIN_HZ	= 600 * 1000000,
	OUTPUT_MAX_HZ	= 2400U * 1000000,
	OUTPUT_MIN_HZ	= 24 * 1000000,
};

#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))

#define PLL_DIVISORS(hz, _refdiv, _postdiv1, _postdiv2) {\
	.refdiv = _refdiv,\
	.fbdiv = (u32)((u64)hz * _refdiv * _postdiv1 * _postdiv2 / OSC_HZ),\
	.postdiv1 = _postdiv1, .postdiv2 = _postdiv2};

/* use integer mode */
static const struct pll_div apll_init_cfg = PLL_DIVISORS(APLL_HZ, 1, 3, 1);
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2, 1);

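/*
 * rkclk_set_pll() programs one PLL with the given divisors: switch to integer
 * mode, power the PLL down, write fbdiv/refdiv/postdiv1/postdiv2, power it
 * back up and wait for the lock bit.
 *
 * The frequencies follow VCO = OSC_HZ / refdiv * fbdiv and
 * output = VCO / postdiv1 / postdiv2.  As an illustration only (assuming the
 * usual 24 MHz OSC_HZ and a 600 MHz target with refdiv = 1, postdiv1 = 3,
 * postdiv2 = 1), PLL_DIVISORS computes fbdiv = 600M * 1 * 3 * 1 / 24M = 75,
 * giving VCO = 24M / 1 * 75 = 1800 MHz and output = 1800M / 3 / 1 = 600 MHz.
 */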
static int rkclk_set_pll(struct rk3128_cru *cru, enum rk_clk_id clk_id,
			 const struct pll_div *div)
{
	int pll_id = rk_pll_id(clk_id);
	struct rk3128_pll *pll = &cru->pll[pll_id];

	/* All PLLs have same VCO and output frequency range restrictions. */
	uint vco_hz = OSC_HZ / 1000 * div->fbdiv / div->refdiv * 1000;
	uint output_hz = vco_hz / div->postdiv1 / div->postdiv2;

	debug("PLL at %p:fd=%d,rd=%d,pd1=%d,pd2=%d,vco=%uHz,output=%uHz\n",
	      pll, div->fbdiv, div->refdiv, div->postdiv1,
	      div->postdiv2, vco_hz, output_hz);
	assert(vco_hz >= VCO_MIN_HZ && vco_hz <= VCO_MAX_HZ &&
	       output_hz >= OUTPUT_MIN_HZ && output_hz <= OUTPUT_MAX_HZ);

	/* use integer mode */
	rk_setreg(&pll->con1, 1 << PLL_DSMPD_SHIFT);
	/* Power down */
	rk_setreg(&pll->con1, 1 << PLL_PD_SHIFT);

	rk_clrsetreg(&pll->con0,
		     PLL_POSTDIV1_MASK | PLL_FBDIV_MASK,
		     (div->postdiv1 << PLL_POSTDIV1_SHIFT) | div->fbdiv);
	rk_clrsetreg(&pll->con1, PLL_POSTDIV2_MASK | PLL_REFDIV_MASK,
		     (div->postdiv2 << PLL_POSTDIV2_SHIFT |
		      div->refdiv << PLL_REFDIV_SHIFT));

	/* Power up */
	rk_clrreg(&pll->con1, 1 << PLL_PD_SHIFT);

	/* waiting for pll lock */
	while (readl(&pll->con1) & (1 << PLL_LOCK_STATUS_SHIFT))
		udelay(1);

	return 0;
}

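/*
 * pll_para_config() derives integer-mode PLL divisors for a requested rate:
 * postdiv1/postdiv2 are picked so the VCO stays within VCO_MIN_HZ..VCO_MAX_HZ,
 * then refdiv/fbdiv are searched to minimise the error against the OSC_HZ
 * reference.  If the best match is still more than 4 MHz off, the request is
 * rejected.
 */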
static int pll_para_config(u32 freq_hz, struct pll_div *div)
{
	u32 ref_khz = OSC_HZ / 1000, refdiv, fbdiv = 0;
	u32 postdiv1, postdiv2 = 1;
	u32 fref_khz;
	u32 diff_khz, best_diff_khz;
	const u32 max_refdiv = 63, max_fbdiv = 3200, min_fbdiv = 16;
	const u32 max_postdiv1 = 7, max_postdiv2 = 7;
	u32 vco_khz;
	u32 freq_khz = freq_hz / 1000;

	if (!freq_hz) {
		printf("%s: the frequency can't be 0 Hz\n", __func__);
		return -1;
	}

	postdiv1 = DIV_ROUND_UP(VCO_MIN_HZ / 1000, freq_khz);
	if (postdiv1 > max_postdiv1) {
		postdiv2 = DIV_ROUND_UP(postdiv1, max_postdiv1);
		postdiv1 = DIV_ROUND_UP(postdiv1, postdiv2);
	}

	vco_khz = freq_khz * postdiv1 * postdiv2;

	if (vco_khz < (VCO_MIN_HZ / 1000) || vco_khz > (VCO_MAX_HZ / 1000) ||
	    postdiv2 > max_postdiv2) {
		printf("%s: Cannot find a supported VCO for frequency %uHz\n",
		       __func__, freq_hz);
		return -1;
	}

	div->postdiv1 = postdiv1;
	div->postdiv2 = postdiv2;

	best_diff_khz = vco_khz;
	for (refdiv = 1; refdiv < max_refdiv && best_diff_khz; refdiv++) {
		fref_khz = ref_khz / refdiv;

		fbdiv = vco_khz / fref_khz;
		if ((fbdiv >= max_fbdiv) || (fbdiv <= min_fbdiv))
			continue;
		diff_khz = vco_khz - fbdiv * fref_khz;
		if (fbdiv + 1 < max_fbdiv && diff_khz > fref_khz / 2) {
			fbdiv++;
			diff_khz = fref_khz - diff_khz;
		}

		if (diff_khz >= best_diff_khz)
			continue;

		best_diff_khz = diff_khz;
		div->refdiv = refdiv;
		div->fbdiv = fbdiv;
	}

	if (best_diff_khz > 4 * 1000) {
		printf("%s: Failed to match output frequency %u, best error is %u Hz\n",
		       __func__, freq_hz,
		       best_diff_khz * 1000);
		return -1;
	}
	return 0;
}

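/*
 * One-time CRU setup: drop the PLLs to slow mode, program APLL and GPLL,
 * set the core (APLL-based) and pd_bus/pd_peri (GPLL-based) divisors,
 * switch the PLLs back to normal mode and cap the NAND controller clock.
 */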
static void rkclk_init(struct rk3128_cru *cru)
{
	u32 aclk_div;
	u32 hclk_div;
	u32 pclk_div;

	/* pll enter slow-mode */
	rk_clrsetreg(&cru->cru_mode_con,
		     GPLL_MODE_MASK | APLL_MODE_MASK,
		     GPLL_MODE_SLOW << GPLL_MODE_SHIFT |
		     APLL_MODE_SLOW << APLL_MODE_SHIFT);

	/* init pll */
	rkclk_set_pll(cru, CLK_ARM, &apll_init_cfg);
	rkclk_set_pll(cru, CLK_GENERAL, &gpll_init_cfg);

	/*
	 * select apll as cpu/core clock pll source and
	 * set up dependent divisors for PERI and ACLK clocks.
	 * core hz : apll = 1:1
	 */
	aclk_div = APLL_HZ / CORE_ACLK_HZ - 1;
	assert((aclk_div + 1) * CORE_ACLK_HZ == APLL_HZ && aclk_div < 0x7);

	pclk_div = APLL_HZ / CORE_PERI_HZ - 1;
	assert((pclk_div + 1) * CORE_PERI_HZ == APLL_HZ && pclk_div < 0xf);

	rk_clrsetreg(&cru->cru_clksel_con[0],
		     CORE_CLK_PLL_SEL_MASK | CORE_DIV_CON_MASK,
		     CORE_CLK_PLL_SEL_APLL << CORE_CLK_PLL_SEL_SHIFT |
		     0 << CORE_DIV_CON_SHIFT);

	rk_clrsetreg(&cru->cru_clksel_con[1],
		     CORE_ACLK_DIV_MASK | CORE_PERI_DIV_MASK,
		     aclk_div << CORE_ACLK_DIV_SHIFT |
		     pclk_div << CORE_PERI_DIV_SHIFT);

	/*
	 * select gpll as pd_bus bus clock source and
	 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
	 */
	aclk_div = GPLL_HZ / BUS_ACLK_HZ - 1;
	assert((aclk_div + 1) * BUS_ACLK_HZ == GPLL_HZ && aclk_div <= 0x1f);

	pclk_div = BUS_ACLK_HZ / BUS_PCLK_HZ - 1;
	assert((pclk_div + 1) * BUS_PCLK_HZ == BUS_ACLK_HZ && pclk_div <= 0x7);

	hclk_div = BUS_ACLK_HZ / BUS_HCLK_HZ - 1;
	assert((hclk_div + 1) * BUS_HCLK_HZ == BUS_ACLK_HZ && hclk_div <= 0x3);

	rk_clrsetreg(&cru->cru_clksel_con[0],
		     BUS_ACLK_PLL_SEL_MASK | BUS_ACLK_DIV_MASK,
		     BUS_ACLK_PLL_SEL_GPLL << BUS_ACLK_PLL_SEL_SHIFT |
		     aclk_div << BUS_ACLK_DIV_SHIFT);

	rk_clrsetreg(&cru->cru_clksel_con[1],
		     BUS_PCLK_DIV_MASK | BUS_HCLK_DIV_MASK,
		     pclk_div << BUS_PCLK_DIV_SHIFT |
		     hclk_div << BUS_HCLK_DIV_SHIFT);

	/*
	 * select gpll as pd_peri bus clock source and
	 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
	 */
	aclk_div = GPLL_HZ / PERI_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERI_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = ilog2(PERI_ACLK_HZ / PERI_HCLK_HZ);
	assert((1 << hclk_div) * PERI_HCLK_HZ ==
	       PERI_ACLK_HZ && (hclk_div < 0x4));

	pclk_div = ilog2(PERI_ACLK_HZ / PERI_PCLK_HZ);
	assert((1 << pclk_div) * PERI_PCLK_HZ ==
	       PERI_ACLK_HZ && pclk_div < 0x8);

	rk_clrsetreg(&cru->cru_clksel_con[10],
		     PERI_PLL_SEL_MASK | PERI_PCLK_DIV_MASK |
		     PERI_HCLK_DIV_MASK | PERI_ACLK_DIV_MASK,
		     PERI_PLL_GPLL << PERI_PLL_SEL_SHIFT |
		     pclk_div << PERI_PCLK_DIV_SHIFT |
		     hclk_div << PERI_HCLK_DIV_SHIFT |
		     aclk_div << PERI_ACLK_DIV_SHIFT);

	/* PLL enter normal-mode */
	rk_clrsetreg(&cru->cru_mode_con,
		     GPLL_MODE_MASK | APLL_MODE_MASK | CPLL_MODE_MASK,
		     GPLL_MODE_NORM << GPLL_MODE_SHIFT |
		     APLL_MODE_NORM << APLL_MODE_SHIFT |
		     CPLL_MODE_NORM << CPLL_MODE_SHIFT);

	/* limit the NAND controller working clock to at most 150 MHz */
	rk_clrsetreg(&cru->cru_clksel_con[2],
		     NANDC_PLL_SEL_MASK | NANDC_CLK_DIV_MASK,
		     NANDC_PLL_SEL_GPLL << NANDC_PLL_SEL_SHIFT |
		     3 << NANDC_CLK_DIV_SHIFT);
}

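/*
 * The PLL work mode is read from CRU_MODE_CON: slow mode returns the 24 MHz
 * OSC_HZ directly, normal mode computes OSC_HZ * fbdiv / (refdiv * postdiv1 *
 * postdiv2) from the divider fields, and deep-slow mode runs from the
 * 32.768 kHz clock.  The GPLL_MODE_* values are reused for the decode since
 * every PLL uses the same mode encoding here.
 */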
/* Get pll rate by id */
static u32 rkclk_pll_get_rate(struct rk3128_cru *cru,
			      enum rk_clk_id clk_id)
{
	u32 refdiv, fbdiv, postdiv1, postdiv2;
	u32 con;
	int pll_id = rk_pll_id(clk_id);
	struct rk3128_pll *pll = &cru->pll[pll_id];
	static u8 clk_shift[CLK_COUNT] = {
		0xff, APLL_MODE_SHIFT, DPLL_MODE_SHIFT, CPLL_MODE_SHIFT,
		GPLL_MODE_SHIFT, 0xff
	};
	static u32 clk_mask[CLK_COUNT] = {
		0xff, APLL_MODE_MASK, DPLL_MODE_MASK, CPLL_MODE_MASK,
		GPLL_MODE_MASK, 0xff
	};
	uint shift;
	uint mask;

	con = readl(&cru->cru_mode_con);
	shift = clk_shift[clk_id];
	mask = clk_mask[clk_id];

	switch ((con & mask) >> shift) {
	case GPLL_MODE_SLOW:
		return OSC_HZ;
	case GPLL_MODE_NORM:
		/* normal mode */
		con = readl(&pll->con0);
		postdiv1 = (con & PLL_POSTDIV1_MASK) >> PLL_POSTDIV1_SHIFT;
		fbdiv = (con & PLL_FBDIV_MASK) >> PLL_FBDIV_SHIFT;
		con = readl(&pll->con1);
		postdiv2 = (con & PLL_POSTDIV2_MASK) >> PLL_POSTDIV2_SHIFT;
		refdiv = (con & PLL_REFDIV_MASK) >> PLL_REFDIV_SHIFT;
		return (24 * fbdiv / (refdiv * postdiv1 * postdiv2)) * 1000000;
	case GPLL_MODE_DEEP:
	default:
		return 32768;
	}
}

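/*
 * MMC clock handling: the source mux selects either the 24 MHz OSC or GPLL,
 * and the resulting rate is src_rate / (div + 1).  rockchip_mmc_set_clk()
 * computes the divider against half the source rate because the MMC
 * controller divides its input clock by two internally.  A rough sketch,
 * assuming GPLL runs at 594 MHz: asking for 52 MHz gives
 * src_clk_div = DIV_ROUND_UP(297 MHz, 52 MHz) = 6, so the CRU outputs
 * 99 MHz and the controller's internal /2 yields 49.5 MHz at the card.
 */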
static ulong rockchip_mmc_get_clk(struct rk3128_cru *cru, uint clk_general_rate,
				  int periph)
{
	uint src_rate;
	uint div, mux;
	u32 con;

	switch (periph) {
	case HCLK_EMMC:
	case SCLK_EMMC:
	case SCLK_EMMC_SAMPLE:
		con = readl(&cru->cru_clksel_con[12]);
		mux = (con & EMMC_PLL_MASK) >> EMMC_PLL_SHIFT;
		div = (con & EMMC_DIV_MASK) >> EMMC_DIV_SHIFT;
		break;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		con = readl(&cru->cru_clksel_con[11]);
		mux = (con & MMC0_PLL_MASK) >> MMC0_PLL_SHIFT;
		div = (con & MMC0_DIV_MASK) >> MMC0_DIV_SHIFT;
		break;
	default:
		return -EINVAL;
	}

	src_rate = mux == EMMC_SEL_24M ? OSC_HZ : clk_general_rate;
	return DIV_TO_RATE(src_rate, div);
}

static ulong rockchip_mmc_set_clk(struct rk3128_cru *cru, uint clk_general_rate,
				  int periph, uint freq)
{
	int src_clk_div;
	int mux;

	debug("%s: clk_general_rate=%u\n", __func__, clk_general_rate);

	/*
	 * The MMC controller divides its clock by 2 internally, so the CRU
	 * must be asked for twice the requested rate.
	 */
	src_clk_div = DIV_ROUND_UP(clk_general_rate / 2, freq);

	if (src_clk_div > 128) {
		src_clk_div = DIV_ROUND_UP(OSC_HZ / 2, freq);
		mux = EMMC_SEL_24M;
	} else {
		mux = EMMC_SEL_GPLL;
	}

	switch (periph) {
	case HCLK_EMMC:
		rk_clrsetreg(&cru->cru_clksel_con[12],
			     EMMC_PLL_MASK | EMMC_DIV_MASK,
			     mux << EMMC_PLL_SHIFT |
			     (src_clk_div - 1) << EMMC_DIV_SHIFT);
		break;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		rk_clrsetreg(&cru->cru_clksel_con[11],
			     MMC0_PLL_MASK | MMC0_DIV_MASK,
			     mux << MMC0_PLL_SHIFT |
			     (src_clk_div - 1) << MMC0_DIV_SHIFT);
		break;
	default:
		return -EINVAL;
	}

	return rockchip_mmc_get_clk(cru, clk_general_rate, periph);
}

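/*
 * The PCLK for I2C and PWM is divided down from PERI_ACLK_HZ; the code below
 * uses a 2-bit divider field at bit 12 of CRU_CLKSEL_CON10, so
 * pclk = PERI_ACLK_HZ / (field + 1).  Note that rk3128_peri_set_pclk() only
 * sets bits (rk_setreg), so it cannot lower a divider that has already been
 * programmed to a larger value.
 */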
static ulong rk3128_peri_get_pclk(struct rk3128_cru *cru, ulong clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		con = readl(&cru->cru_clksel_con[10]);
		div = con >> 12 & 0x3;
		break;
	default:
		printf("unsupported peripheral bus clock\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PERI_ACLK_HZ, div);
}

static ulong rk3128_peri_set_pclk(struct rk3128_cru *cru, ulong clk_id, uint hz)
{
	int src_clk_div;

	src_clk_div = PERI_ACLK_HZ / hz;
	assert(src_clk_div - 1 < 4);

	switch (clk_id) {
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		rk_setreg(&cru->cru_clksel_con[10],
			  ((src_clk_div - 1) << 12));
		break;
	default:
		printf("unsupported peripheral bus clock\n");
		return -EINVAL;
	}

	/* the register field holds src_clk_div - 1, so report the real rate */
	return DIV_TO_RATE(PERI_ACLK_HZ, src_clk_div - 1);
}

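/*
 * The SARADC clock is divided down from the 24 MHz OSC; the divider field in
 * CRU_CLKSEL_CON24 holds div - 1, so the resulting rate is
 * OSC_HZ / (field + 1).
 */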
static ulong rk3128_saradc_get_clk(struct rk3128_cru *cru)
{
	u32 div, val;

	val = readl(&cru->cru_clksel_con[24]);
	div = bitfield_extract(val, SARADC_DIV_CON_SHIFT,
			       SARADC_DIV_CON_WIDTH);

	return DIV_TO_RATE(OSC_HZ, div);
}

static ulong rk3128_saradc_set_clk(struct rk3128_cru *cru, uint hz)
{
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
	assert(src_clk_div < 128);

	rk_clrsetreg(&cru->cru_clksel_con[24],
		     SARADC_DIV_CON_MASK,
		     src_clk_div << SARADC_DIV_CON_SHIFT);

	return rk3128_saradc_get_clk(cru);
}

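/*
 * VOP clocks: ACLK_VIO0/ACLK_VIO1 are divided down from GPLL, while DCLK_VOP
 * reprograms CPLL to the requested pixel clock (via pll_para_config()) and
 * then feeds it through with a divider of 1.
 */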
static ulong rk3128_vop_set_clk(struct rk3128_cru *cru, ulong clk_id, uint hz)
{
	int src_clk_div;
	struct pll_div cpll_config = {0};

	src_clk_div = GPLL_HZ / hz;
	assert(src_clk_div - 1 < 31);

	switch (clk_id) {
	case ACLK_VIO0:
		rk_clrsetreg(&cru->cru_clksel_con[31],
			     VIO0_PLL_MASK | VIO0_DIV_MASK,
			     VIO0_SEL_GPLL << VIO0_PLL_SHIFT |
			     (src_clk_div - 1) << VIO0_DIV_SHIFT);
		break;
	case ACLK_VIO1:
		rk_clrsetreg(&cru->cru_clksel_con[31],
			     VIO1_PLL_MASK | VIO1_DIV_MASK,
			     VIO1_SEL_GPLL << VIO1_PLL_SHIFT |
			     (src_clk_div - 1) << VIO1_DIV_SHIFT);
		break;
	case DCLK_VOP:
		if (pll_para_config(hz, &cpll_config))
			return -1;
		rkclk_set_pll(cru, CLK_CODEC, &cpll_config);

		rk_clrsetreg(&cru->cru_clksel_con[27],
			     DCLK_VOP_SEL_MASK | DCLK_VOP_DIV_CON_MASK,
			     DCLK_VOP_PLL_SEL_CPLL << DCLK_VOP_SEL_SHIFT |
			     (1 - 1) << DCLK_VOP_DIV_CON_SHIFT);
		break;
	default:
		printf("unsupported VOP clock\n");
		return -EINVAL;
	}

	return hz;
}

static ulong rk3128_vop_get_rate(struct rk3128_cru *cru, ulong clk_id)
{
	u32 div, con, parent;

	switch (clk_id) {
	case ACLK_VIO0:
		con = readl(&cru->cru_clksel_con[31]);
		div = con & 0x1f;
		parent = GPLL_HZ;
		break;
	case ACLK_VIO1:
		con = readl(&cru->cru_clksel_con[31]);
		div = (con >> 8) & 0x1f;
		parent = GPLL_HZ;
		break;
	case DCLK_VOP:
		con = readl(&cru->cru_clksel_con[27]);
		div = (con >> 8) & 0xfff;
		parent = rkclk_pll_get_rate(cru, CLK_CODEC);
		break;
	default:
		return -ENOENT;
	}
	return DIV_TO_RATE(parent, div);
}

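/*
 * clk_ops entry points: requests are dispatched on the clock ID from the
 * rk3128-cru binding.  IDs up to 63 are treated as PLL IDs here; their rate
 * can be read back, while set_rate deliberately leaves them untouched and
 * returns 0.
 */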
static ulong rk3128_clk_get_rate(struct clk *clk)
{
	struct rk3128_clk_priv *priv = dev_get_priv(clk->dev);

	switch (clk->id) {
	case 0 ... 63:
		return rkclk_pll_get_rate(priv->cru, clk->id);
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		return rk3128_peri_get_pclk(priv->cru, clk->id);
	case SCLK_SARADC:
		return rk3128_saradc_get_clk(priv->cru);
	case DCLK_VOP:
	case ACLK_VIO0:
	case ACLK_VIO1:
		return rk3128_vop_get_rate(priv->cru, clk->id);
	default:
		return -ENOENT;
	}
}

static ulong rk3128_clk_set_rate(struct clk *clk, ulong rate)
{
	struct rk3128_clk_priv *priv = dev_get_priv(clk->dev);
	ulong new_rate, gclk_rate;

	gclk_rate = rkclk_pll_get_rate(priv->cru, CLK_GENERAL);
	switch (clk->id) {
	case 0 ... 63:
		return 0;
	case DCLK_VOP:
	case ACLK_VIO0:
	case ACLK_VIO1:
		new_rate = rk3128_vop_set_clk(priv->cru,
					      clk->id, rate);
		break;
	case HCLK_EMMC:
		new_rate = rockchip_mmc_set_clk(priv->cru, gclk_rate,
						clk->id, rate);
		break;
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		new_rate = rk3128_peri_set_pclk(priv->cru, clk->id, rate);
		break;
	case SCLK_SARADC:
		new_rate = rk3128_saradc_set_clk(priv->cru, rate);
		break;
	default:
		return -ENOENT;
	}

	return new_rate;
}

static struct clk_ops rk3128_clk_ops = {
	.get_rate = rk3128_clk_get_rate,
	.set_rate = rk3128_clk_set_rate,
};

static int rk3128_clk_of_to_plat(struct udevice *dev)
{
	struct rk3128_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);

	return 0;
}

static int rk3128_clk_probe(struct udevice *dev)
{
	struct rk3128_clk_priv *priv = dev_get_priv(dev);

	rkclk_init(priv->cru);

	return 0;
}

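/*
 * The sysreset functionality has no device-tree node of its own, so bind the
 * rockchip_sysreset driver here and hand it the offsets of the global
 * soft-reset registers inside the CRU.
 */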
static int rk3128_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rk3128_cru,
						    cru_glb_srst_fst_value);
		priv->glb_srst_snd_value = offsetof(struct rk3128_cru,
						    cru_glb_srst_snd_value);
		dev_set_priv(sys_child, priv);
	}

	return 0;
}

static const struct udevice_id rk3128_clk_ids[] = {
	{ .compatible = "rockchip,rk3128-cru" },
	{ .compatible = "rockchip,rk3126-cru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3128_cru) = {
	.name = "clk_rk3128",
	.id = UCLASS_CLK,
	.of_match = rk3128_clk_ids,
	.priv_auto = sizeof(struct rk3128_clk_priv),
	.of_to_plat = rk3128_clk_of_to_plat,
	.ops = &rk3128_clk_ops,
	.bind = rk3128_clk_bind,
	.probe = rk3128_clk_probe,
};