// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd
 */

#include <clk-uclass.h>
#include <dm.h>
#include <errno.h>
#include <log.h>
#include <malloc.h>
#include <syscon.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru_rk3128.h>
#include <asm/arch-rockchip/hardware.h>
#include <bitfield.h>
#include <dm/device-internal.h>
#include <dm/lists.h>
#include <dt-bindings/clock/rk3128-cru.h>
#include <linux/delay.h>
#include <linux/log2.h>

enum {
	VCO_MAX_HZ	= 2400U * 1000000,
	VCO_MIN_HZ	= 600 * 1000000,
	OUTPUT_MAX_HZ	= 2400U * 1000000,
	OUTPUT_MIN_HZ	= 24 * 1000000,
};

#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))

#define PLL_DIVISORS(hz, _refdiv, _postdiv1, _postdiv2) {\
	.refdiv = _refdiv,\
	.fbdiv = (u32)((u64)hz * _refdiv * _postdiv1 * _postdiv2 / OSC_HZ),\
	.postdiv1 = _postdiv1, .postdiv2 = _postdiv2};

/* use integer mode */
static const struct pll_div apll_init_cfg = PLL_DIVISORS(APLL_HZ, 1, 3, 1);
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2, 1);

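/*
 * Program one of the CRU PLLs with the given divisors, using integer
 * (non-fractional) mode, and wait for it to lock.
 */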
static int rkclk_set_pll(struct rk3128_cru *cru, enum rk_clk_id clk_id,
			 const struct pll_div *div)
{
	int pll_id = rk_pll_id(clk_id);
	struct rk3128_pll *pll = &cru->pll[pll_id];

	/* All PLLs have the same VCO and output frequency range restrictions. */
	uint vco_hz = OSC_HZ / 1000 * div->fbdiv / div->refdiv * 1000;
	uint output_hz = vco_hz / div->postdiv1 / div->postdiv2;

	debug("PLL at %p:fd=%d,rd=%d,pd1=%d,pd2=%d,vco=%uHz,output=%uHz\n",
	      pll, div->fbdiv, div->refdiv, div->postdiv1,
	      div->postdiv2, vco_hz, output_hz);
	assert(vco_hz >= VCO_MIN_HZ && vco_hz <= VCO_MAX_HZ &&
	       output_hz >= OUTPUT_MIN_HZ && output_hz <= OUTPUT_MAX_HZ);

	/* use integer mode */
	rk_setreg(&pll->con1, 1 << PLL_DSMPD_SHIFT);
	/* power down */
	rk_setreg(&pll->con1, 1 << PLL_PD_SHIFT);

	rk_clrsetreg(&pll->con0,
		     PLL_POSTDIV1_MASK | PLL_FBDIV_MASK,
		     (div->postdiv1 << PLL_POSTDIV1_SHIFT) | div->fbdiv);
	rk_clrsetreg(&pll->con1, PLL_POSTDIV2_MASK | PLL_REFDIV_MASK,
		     (div->postdiv2 << PLL_POSTDIV2_SHIFT |
		      div->refdiv << PLL_REFDIV_SHIFT));

	/* power up */
	rk_clrreg(&pll->con1, 1 << PLL_PD_SHIFT);

	/* wait for the PLL to lock */
	while (readl(&pll->con1) & (1 << PLL_LOCK_STATUS_SHIFT))
		udelay(1);

	return 0;
}

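/*
 * Work out a PLL divisor setting (refdiv/fbdiv/postdiv1/postdiv2) that
 * approximates the requested output frequency from the 24 MHz oscillator
 * while keeping the VCO within its supported range.
 * Returns 0 on success, -1 if no setting within 4 MHz of the target exists.
 */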
static int pll_para_config(u32 freq_hz, struct pll_div *div)
{
	u32 ref_khz = OSC_HZ / 1000, refdiv, fbdiv = 0;
	u32 postdiv1, postdiv2 = 1;
	u32 fref_khz;
	u32 diff_khz, best_diff_khz;
	const u32 max_refdiv = 63, max_fbdiv = 3200, min_fbdiv = 16;
	const u32 max_postdiv1 = 7, max_postdiv2 = 7;
	u32 vco_khz;
	u32 freq_khz = freq_hz / 1000;

	if (!freq_hz) {
		printf("%s: the frequency can't be 0 Hz\n", __func__);
		return -1;
	}

	postdiv1 = DIV_ROUND_UP(VCO_MIN_HZ / 1000, freq_khz);
	if (postdiv1 > max_postdiv1) {
		postdiv2 = DIV_ROUND_UP(postdiv1, max_postdiv1);
		postdiv1 = DIV_ROUND_UP(postdiv1, postdiv2);
	}

	vco_khz = freq_khz * postdiv1 * postdiv2;

	if (vco_khz < (VCO_MIN_HZ / 1000) || vco_khz > (VCO_MAX_HZ / 1000) ||
	    postdiv2 > max_postdiv2) {
		printf("%s: cannot find a supported VCO for freq %u Hz\n",
		       __func__, freq_hz);
		return -1;
	}

	div->postdiv1 = postdiv1;
	div->postdiv2 = postdiv2;

	best_diff_khz = vco_khz;
	for (refdiv = 1; refdiv < max_refdiv && best_diff_khz; refdiv++) {
		fref_khz = ref_khz / refdiv;

		fbdiv = vco_khz / fref_khz;
		if ((fbdiv >= max_fbdiv) || (fbdiv <= min_fbdiv))
			continue;
		diff_khz = vco_khz - fbdiv * fref_khz;
		if (fbdiv + 1 < max_fbdiv && diff_khz > fref_khz / 2) {
			fbdiv++;
			diff_khz = fref_khz - diff_khz;
		}

		if (diff_khz >= best_diff_khz)
			continue;

		best_diff_khz = diff_khz;
		div->refdiv = refdiv;
		div->fbdiv = fbdiv;
	}

	if (best_diff_khz > 4 * 1000) {
		printf("%s: failed to match output frequency %u Hz, best diff is %u Hz\n",
		       __func__, freq_hz, best_diff_khz * 1000);
		return -1;
	}
	return 0;
}

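/*
 * Boot-time clock setup: switch the PLLs to slow mode, reprogram APLL and
 * GPLL, set up the CPU, pd_bus and pd_peri dividers, then switch the PLLs
 * back to normal mode.
 */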
static void rkclk_init(struct rk3128_cru *cru)
{
	u32 aclk_div;
	u32 hclk_div;
	u32 pclk_div;

	/* PLLs enter slow mode */
	rk_clrsetreg(&cru->cru_mode_con,
		     GPLL_MODE_MASK | APLL_MODE_MASK,
		     GPLL_MODE_SLOW << GPLL_MODE_SHIFT |
		     APLL_MODE_SLOW << APLL_MODE_SHIFT);

	/* init pll */
	rkclk_set_pll(cru, CLK_ARM, &apll_init_cfg);
	rkclk_set_pll(cru, CLK_GENERAL, &gpll_init_cfg);

	/*
	 * select apll as cpu/core clock pll source and
	 * set up dependent divisors for PERI and ACLK clocks.
	 * core hz : apll = 1:1
	 */
	aclk_div = APLL_HZ / CORE_ACLK_HZ - 1;
	assert((aclk_div + 1) * CORE_ACLK_HZ == APLL_HZ && aclk_div < 0x7);

	pclk_div = APLL_HZ / CORE_PERI_HZ - 1;
	assert((pclk_div + 1) * CORE_PERI_HZ == APLL_HZ && pclk_div < 0xf);

	rk_clrsetreg(&cru->cru_clksel_con[0],
		     CORE_CLK_PLL_SEL_MASK | CORE_DIV_CON_MASK,
		     CORE_CLK_PLL_SEL_APLL << CORE_CLK_PLL_SEL_SHIFT |
		     0 << CORE_DIV_CON_SHIFT);

	rk_clrsetreg(&cru->cru_clksel_con[1],
		     CORE_ACLK_DIV_MASK | CORE_PERI_DIV_MASK,
		     aclk_div << CORE_ACLK_DIV_SHIFT |
		     pclk_div << CORE_PERI_DIV_SHIFT);

	/*
	 * select gpll as pd_bus bus clock source and
	 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
	 */
	aclk_div = GPLL_HZ / BUS_ACLK_HZ - 1;
	assert((aclk_div + 1) * BUS_ACLK_HZ == GPLL_HZ && aclk_div <= 0x1f);

	pclk_div = BUS_ACLK_HZ / BUS_PCLK_HZ - 1;
	assert((pclk_div + 1) * BUS_PCLK_HZ == BUS_ACLK_HZ && pclk_div <= 0x7);

	hclk_div = BUS_ACLK_HZ / BUS_HCLK_HZ - 1;
	assert((hclk_div + 1) * BUS_HCLK_HZ == BUS_ACLK_HZ && hclk_div <= 0x3);

	rk_clrsetreg(&cru->cru_clksel_con[0],
		     BUS_ACLK_PLL_SEL_MASK | BUS_ACLK_DIV_MASK,
		     BUS_ACLK_PLL_SEL_GPLL << BUS_ACLK_PLL_SEL_SHIFT |
		     aclk_div << BUS_ACLK_DIV_SHIFT);

	rk_clrsetreg(&cru->cru_clksel_con[1],
		     BUS_PCLK_DIV_MASK | BUS_HCLK_DIV_MASK,
		     pclk_div << BUS_PCLK_DIV_SHIFT |
		     hclk_div << BUS_HCLK_DIV_SHIFT);

	/*
	 * select gpll as pd_peri bus clock source and
	 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
	 */
	aclk_div = GPLL_HZ / PERI_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERI_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = ilog2(PERI_ACLK_HZ / PERI_HCLK_HZ);
	assert((1 << hclk_div) * PERI_HCLK_HZ == PERI_ACLK_HZ && hclk_div < 0x4);

	pclk_div = ilog2(PERI_ACLK_HZ / PERI_PCLK_HZ);
	assert((1 << pclk_div) * PERI_PCLK_HZ == PERI_ACLK_HZ && pclk_div < 0x8);

	rk_clrsetreg(&cru->cru_clksel_con[10],
		     PERI_PLL_SEL_MASK | PERI_PCLK_DIV_MASK |
		     PERI_HCLK_DIV_MASK | PERI_ACLK_DIV_MASK,
		     PERI_PLL_GPLL << PERI_PLL_SEL_SHIFT |
		     pclk_div << PERI_PCLK_DIV_SHIFT |
		     hclk_div << PERI_HCLK_DIV_SHIFT |
		     aclk_div << PERI_ACLK_DIV_SHIFT);

	/* PLLs enter normal mode */
	rk_clrsetreg(&cru->cru_mode_con,
		     GPLL_MODE_MASK | APLL_MODE_MASK | CPLL_MODE_MASK,
		     GPLL_MODE_NORM << GPLL_MODE_SHIFT |
		     APLL_MODE_NORM << APLL_MODE_SHIFT |
		     CPLL_MODE_NORM << CPLL_MODE_SHIFT);

	/* limit the NAND controller working clock to at most 150 MHz */
	rk_clrsetreg(&cru->cru_clksel_con[2],
		     NANDC_PLL_SEL_MASK | NANDC_CLK_DIV_MASK,
		     NANDC_PLL_SEL_GPLL << NANDC_PLL_SEL_SHIFT |
		     3 << NANDC_CLK_DIV_SHIFT);
}

/* Get pll rate by id */
static u32 rkclk_pll_get_rate(struct rk3128_cru *cru,
			      enum rk_clk_id clk_id)
{
	u32 refdiv, fbdiv, postdiv1, postdiv2;
	u32 con;
	int pll_id = rk_pll_id(clk_id);
	struct rk3128_pll *pll = &cru->pll[pll_id];
	static u8 clk_shift[CLK_COUNT] = {
		0xff, APLL_MODE_SHIFT, DPLL_MODE_SHIFT, CPLL_MODE_SHIFT,
		GPLL_MODE_SHIFT, 0xff
	};
	static u32 clk_mask[CLK_COUNT] = {
		0xff, APLL_MODE_MASK, DPLL_MODE_MASK, CPLL_MODE_MASK,
		GPLL_MODE_MASK, 0xff
	};
	uint shift;
	uint mask;

	con = readl(&cru->cru_mode_con);
	shift = clk_shift[clk_id];
	mask = clk_mask[clk_id];

	switch ((con & mask) >> shift) {
	case GPLL_MODE_SLOW:
		return OSC_HZ;
	case GPLL_MODE_NORM:
		/* normal mode */
		con = readl(&pll->con0);
		postdiv1 = (con & PLL_POSTDIV1_MASK) >> PLL_POSTDIV1_SHIFT;
		fbdiv = (con & PLL_FBDIV_MASK) >> PLL_FBDIV_SHIFT;
		con = readl(&pll->con1);
		postdiv2 = (con & PLL_POSTDIV2_MASK) >> PLL_POSTDIV2_SHIFT;
		refdiv = (con & PLL_REFDIV_MASK) >> PLL_REFDIV_SHIFT;
		return (24 * fbdiv / (refdiv * postdiv1 * postdiv2)) * 1000000;
	case GPLL_MODE_DEEP:
	default:
		return 32768;
	}
}

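/*
 * Return the current SDMMC/eMMC clock rate, derived from the selected
 * parent (24 MHz oscillator or GPLL) and the divider in the CRU.
 */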
static ulong rockchip_mmc_get_clk(struct rk3128_cru *cru, uint clk_general_rate,
				  int periph)
{
	uint src_rate;
	uint div, mux;
	u32 con;

	switch (periph) {
	case HCLK_EMMC:
	case SCLK_EMMC:
	case SCLK_EMMC_SAMPLE:
		con = readl(&cru->cru_clksel_con[12]);
		mux = (con & EMMC_PLL_MASK) >> EMMC_PLL_SHIFT;
		div = (con & EMMC_DIV_MASK) >> EMMC_DIV_SHIFT;
		break;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		con = readl(&cru->cru_clksel_con[11]);
		mux = (con & MMC0_PLL_MASK) >> MMC0_PLL_SHIFT;
		div = (con & MMC0_DIV_MASK) >> MMC0_DIV_SHIFT;
		break;
	default:
		return -EINVAL;
	}

	src_rate = mux == EMMC_SEL_24M ? OSC_HZ : clk_general_rate;
	return DIV_TO_RATE(src_rate, div);
}

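/*
 * Select the MMC clock parent (GPLL, or the 24 MHz oscillator when the
 * required divider would be too large) and program the divider for the
 * requested rate. Returns the rate actually achieved.
 */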
static ulong rockchip_mmc_set_clk(struct rk3128_cru *cru, uint clk_general_rate,
				  int periph, uint freq)
{
	int src_clk_div;
	int mux;

	debug("%s: clk_general_rate=%u\n", __func__, clk_general_rate);

	/*
	 * The MMC clock is divided by 2 internally by default, so the CRU
	 * needs to provide double the requested rate.
	 */
	src_clk_div = DIV_ROUND_UP(clk_general_rate / 2, freq);

	if (src_clk_div > 128) {
		src_clk_div = DIV_ROUND_UP(OSC_HZ / 2, freq);
		mux = EMMC_SEL_24M;
	} else {
		mux = EMMC_SEL_GPLL;
	}

	switch (periph) {
	case HCLK_EMMC:
		rk_clrsetreg(&cru->cru_clksel_con[12],
			     EMMC_PLL_MASK | EMMC_DIV_MASK,
			     mux << EMMC_PLL_SHIFT |
			     (src_clk_div - 1) << EMMC_DIV_SHIFT);
		break;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		rk_clrsetreg(&cru->cru_clksel_con[11],
			     MMC0_PLL_MASK | MMC0_DIV_MASK,
			     mux << MMC0_PLL_SHIFT |
			     (src_clk_div - 1) << MMC0_DIV_SHIFT);
		break;
	default:
		return -EINVAL;
	}

	return rockchip_mmc_get_clk(cru, clk_general_rate, periph);
}

static ulong rk3128_peri_get_pclk(struct rk3128_cru *cru, ulong clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		con = readl(&cru->cru_clksel_con[10]);
		div = con >> 12 & 0x3;
		break;
	default:
		printf("this peripheral bus is not supported\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PERI_ACLK_HZ, div);
}

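/*
 * Set the pd_peri PCLK divider (used by the I2C and PWM peripherals),
 * dividing down from the pd_peri ACLK.
 */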
static ulong rk3128_peri_set_pclk(struct rk3128_cru *cru, ulong clk_id, uint hz)
{
	int src_clk_div;

	src_clk_div = PERI_ACLK_HZ / hz;
	assert(src_clk_div - 1 < 4);

	switch (clk_id) {
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		rk_setreg(&cru->cru_clksel_con[10],
			  ((src_clk_div - 1) << 12));
		break;
	default:
		printf("this peripheral bus is not supported\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PERI_ACLK_HZ, src_clk_div - 1);
}

static ulong rk3128_saradc_get_clk(struct rk3128_cru *cru)
{
	u32 div, val;

	val = readl(&cru->cru_clksel_con[24]);
	div = bitfield_extract(val, SARADC_DIV_CON_SHIFT,
			       SARADC_DIV_CON_WIDTH);

	return DIV_TO_RATE(OSC_HZ, div);
}

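/* Program the SARADC divider off the 24 MHz oscillator and return the resulting rate */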
static ulong rk3128_saradc_set_clk(struct rk3128_cru *cru, uint hz)
{
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
	assert(src_clk_div < 128);

	rk_clrsetreg(&cru->cru_clksel_con[24],
		     SARADC_DIV_CON_MASK,
		     src_clk_div << SARADC_DIV_CON_SHIFT);

	return rk3128_saradc_get_clk(cru);
}

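/*
 * Configure the VIO ACLKs (divided from GPLL) or the VOP dot clock, for
 * which CPLL is reprogrammed to the requested pixel rate.
 */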
static ulong rk3128_vop_set_clk(struct rk3128_cru *cru, ulong clk_id, uint hz)
{
	int src_clk_div;
	struct pll_div cpll_config = {0};

	src_clk_div = GPLL_HZ / hz;
	assert(src_clk_div - 1 < 31);

	switch (clk_id) {
	case ACLK_VIO0:
		rk_clrsetreg(&cru->cru_clksel_con[31],
			     VIO0_PLL_MASK | VIO0_DIV_MASK,
			     VIO0_SEL_GPLL << VIO0_PLL_SHIFT |
			     (src_clk_div - 1) << VIO0_DIV_SHIFT);
		break;
	case ACLK_VIO1:
		rk_clrsetreg(&cru->cru_clksel_con[31],
			     VIO1_PLL_MASK | VIO1_DIV_MASK,
			     VIO1_SEL_GPLL << VIO1_PLL_SHIFT |
			     (src_clk_div - 1) << VIO1_DIV_SHIFT);
		break;
	case DCLK_VOP:
		if (pll_para_config(hz, &cpll_config))
			return -1;
		rkclk_set_pll(cru, CLK_CODEC, &cpll_config);

		rk_clrsetreg(&cru->cru_clksel_con[27],
			     DCLK_VOP_SEL_MASK | DCLK_VOP_DIV_CON_MASK,
			     DCLK_VOP_PLL_SEL_CPLL << DCLK_VOP_SEL_SHIFT |
			     (1 - 1) << DCLK_VOP_DIV_CON_SHIFT);
		break;
	default:
		printf("this vop clock is not supported\n");
		return -EINVAL;
	}

	return hz;
}

static ulong rk3128_vop_get_rate(struct rk3128_cru *cru, ulong clk_id)
{
	u32 div, con, parent;

	switch (clk_id) {
	case ACLK_VIO0:
		con = readl(&cru->cru_clksel_con[31]);
		div = con & 0x1f;
		parent = GPLL_HZ;
		break;
	case ACLK_VIO1:
		con = readl(&cru->cru_clksel_con[31]);
		div = (con >> 8) & 0x1f;
		parent = GPLL_HZ;
		break;
	case DCLK_VOP:
		con = readl(&cru->cru_clksel_con[27]);
		div = (con >> 8) & 0xfff;
		parent = rkclk_pll_get_rate(cru, CLK_CODEC);
		break;
	default:
		return -ENOENT;
	}

	return DIV_TO_RATE(parent, div);
}

static ulong rk3128_clk_get_rate(struct clk *clk)
{
	struct rk3128_clk_priv *priv = dev_get_priv(clk->dev);

	switch (clk->id) {
	case 0 ... 63:
		return rkclk_pll_get_rate(priv->cru, clk->id);
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		return rk3128_peri_get_pclk(priv->cru, clk->id);
	case SCLK_SARADC:
		return rk3128_saradc_get_clk(priv->cru);
	case DCLK_VOP:
	case ACLK_VIO0:
	case ACLK_VIO1:
		return rk3128_vop_get_rate(priv->cru, clk->id);
	default:
		return -ENOENT;
	}
}

static ulong rk3128_clk_set_rate(struct clk *clk, ulong rate)
{
	struct rk3128_clk_priv *priv = dev_get_priv(clk->dev);
	ulong new_rate, gclk_rate;

	gclk_rate = rkclk_pll_get_rate(priv->cru, CLK_GENERAL);
	switch (clk->id) {
	case 0 ... 63:
		return 0;
	case DCLK_VOP:
	case ACLK_VIO0:
	case ACLK_VIO1:
		new_rate = rk3128_vop_set_clk(priv->cru, clk->id, rate);
		break;
	case HCLK_EMMC:
		new_rate = rockchip_mmc_set_clk(priv->cru, gclk_rate,
						clk->id, rate);
		break;
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		new_rate = rk3128_peri_set_pclk(priv->cru, clk->id, rate);
		break;
	case SCLK_SARADC:
		new_rate = rk3128_saradc_set_clk(priv->cru, rate);
		break;
	default:
		return -ENOENT;
	}

	return new_rate;
}

static struct clk_ops rk3128_clk_ops = {
	.get_rate = rk3128_clk_get_rate,
	.set_rate = rk3128_clk_set_rate,
};

static int rk3128_clk_of_to_plat(struct udevice *dev)
{
	struct rk3128_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);

	return 0;
}

static int rk3128_clk_probe(struct udevice *dev)
{
	struct rk3128_clk_priv *priv = dev_get_priv(dev);

	rkclk_init(priv->cru);

	return 0;
}

static int rk3128_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rk3128_cru,
						    cru_glb_srst_fst_value);
		priv->glb_srst_snd_value = offsetof(struct rk3128_cru,
						    cru_glb_srst_snd_value);
		dev_set_priv(sys_child, priv);
	}

	return 0;
}

static const struct udevice_id rk3128_clk_ids[] = {
	{ .compatible = "rockchip,rk3128-cru" },
	{ .compatible = "rockchip,rk3126-cru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3128_cru) = {
	.name		= "clk_rk3128",
	.id		= UCLASS_CLK,
	.of_match	= rk3128_clk_ids,
	.priv_auto	= sizeof(struct rk3128_clk_priv),
	.of_to_plat	= rk3128_clk_of_to_plat,
	.ops		= &rk3128_clk_ops,
	.bind		= rk3128_clk_bind,
	.probe		= rk3128_clk_probe,
};