// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <errno.h>
#include <log.h>
#include <malloc.h>
#include <syscon.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru_rk3128.h>
#include <asm/arch-rockchip/hardware.h>
#include <bitfield.h>
#include <dm/device-internal.h>
#include <dm/lists.h>
#include <dt-bindings/clock/rk3128-cru.h>
#include <linux/delay.h>
#include <linux/log2.h>

enum {
	VCO_MAX_HZ	= 2400U * 1000000,
	VCO_MIN_HZ	= 600 * 1000000,
	OUTPUT_MAX_HZ	= 2400U * 1000000,
	OUTPUT_MIN_HZ	= 24 * 1000000,
};

#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))

#define PLL_DIVISORS(hz, _refdiv, _postdiv1, _postdiv2) {\
	.refdiv = _refdiv,\
	.fbdiv = (u32)((u64)hz * _refdiv * _postdiv1 * _postdiv2 / OSC_HZ),\
	.postdiv1 = _postdiv1, .postdiv2 = _postdiv2};

/* use integer mode */
static const struct pll_div apll_init_cfg = PLL_DIVISORS(APLL_HZ, 1, 3, 1);
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2, 1);

41static int rkclk_set_pll(struct rk3128_cru *cru, enum rk_clk_id clk_id,
42 const struct pll_div *div)
43{
44 int pll_id = rk_pll_id(clk_id);
45 struct rk3128_pll *pll = &cru->pll[pll_id];
46
47 /* All PLLs have same VCO and output frequency range restrictions. */
48 uint vco_hz = OSC_HZ / 1000 * div->fbdiv / div->refdiv * 1000;
49 uint output_hz = vco_hz / div->postdiv1 / div->postdiv2;
50
51 debug("PLL at %p:fd=%d,rd=%d,pd1=%d,pd2=%d,vco=%uHz,output=%uHz\n",
52 pll, div->fbdiv, div->refdiv, div->postdiv1,
53 div->postdiv2, vco_hz, output_hz);
54 assert(vco_hz >= VCO_MIN_HZ && vco_hz <= VCO_MAX_HZ &&
55 output_hz >= OUTPUT_MIN_HZ && output_hz <= OUTPUT_MAX_HZ);
56
57 /* use integer mode */
58 rk_setreg(&pll->con1, 1 << PLL_DSMPD_SHIFT);
59 /* Power down */
60 rk_setreg(&pll->con1, 1 << PLL_PD_SHIFT);
61
62 rk_clrsetreg(&pll->con0,
63 PLL_POSTDIV1_MASK | PLL_FBDIV_MASK,
64 (div->postdiv1 << PLL_POSTDIV1_SHIFT) | div->fbdiv);
65 rk_clrsetreg(&pll->con1, PLL_POSTDIV2_MASK | PLL_REFDIV_MASK,
66 (div->postdiv2 << PLL_POSTDIV2_SHIFT |
67 div->refdiv << PLL_REFDIV_SHIFT));
68
69 /* Power Up */
70 rk_clrreg(&pll->con1, 1 << PLL_PD_SHIFT);
71
72 /* waiting for pll lock */
73 while (readl(&pll->con1) & (1 << PLL_LOCK_STATUS_SHIFT))
74 udelay(1);
75
76 return 0;
77}
78
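/*
 * Work out refdiv/fbdiv/postdiv1/postdiv2 values that bring the PLL output
 * as close as possible to freq_hz (within 4 MHz); returns 0 on success.
 */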
static int pll_para_config(u32 freq_hz, struct pll_div *div)
{
	u32 ref_khz = OSC_HZ / 1000, refdiv, fbdiv = 0;
	u32 postdiv1, postdiv2 = 1;
	u32 fref_khz;
	u32 diff_khz, best_diff_khz;
	const u32 max_refdiv = 63, max_fbdiv = 3200, min_fbdiv = 16;
	const u32 max_postdiv1 = 7, max_postdiv2 = 7;
	u32 vco_khz;
	u32 freq_khz = freq_hz / 1000;

	if (!freq_hz) {
		printf("%s: the frequency can't be 0 Hz\n", __func__);
		return -1;
	}

	postdiv1 = DIV_ROUND_UP(VCO_MIN_HZ / 1000, freq_khz);
	if (postdiv1 > max_postdiv1) {
		postdiv2 = DIV_ROUND_UP(postdiv1, max_postdiv1);
		postdiv1 = DIV_ROUND_UP(postdiv1, postdiv2);
	}

	vco_khz = freq_khz * postdiv1 * postdiv2;

	if (vco_khz < (VCO_MIN_HZ / 1000) || vco_khz > (VCO_MAX_HZ / 1000) ||
	    postdiv2 > max_postdiv2) {
		printf("%s: Cannot find a supported VCO for frequency %u Hz\n",
		       __func__, freq_hz);
		return -1;
	}

	div->postdiv1 = postdiv1;
	div->postdiv2 = postdiv2;

	best_diff_khz = vco_khz;
	for (refdiv = 1; refdiv < max_refdiv && best_diff_khz; refdiv++) {
		fref_khz = ref_khz / refdiv;

		fbdiv = vco_khz / fref_khz;
		if (fbdiv >= max_fbdiv || fbdiv <= min_fbdiv)
			continue;
		diff_khz = vco_khz - fbdiv * fref_khz;
		if (fbdiv + 1 < max_fbdiv && diff_khz > fref_khz / 2) {
			fbdiv++;
			diff_khz = fref_khz - diff_khz;
		}

		if (diff_khz >= best_diff_khz)
			continue;

		best_diff_khz = diff_khz;
		div->refdiv = refdiv;
		div->fbdiv = fbdiv;
	}

	if (best_diff_khz > 4 * 1000) {
		printf("%s: Failed to match output frequency %u Hz, best difference is %u Hz\n",
		       __func__, freq_hz, best_diff_khz * 1000);
		return -1;
	}
	return 0;
}

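/*
 * One-time clock tree setup: program the APLL and GPLL, then derive the
 * core, pd_bus and pd_peri bus dividers from them.
 */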
static void rkclk_init(struct rk3128_cru *cru)
{
	u32 aclk_div;
	u32 hclk_div;
	u32 pclk_div;

	/* PLLs enter slow mode */
	rk_clrsetreg(&cru->cru_mode_con,
		     GPLL_MODE_MASK | APLL_MODE_MASK,
		     GPLL_MODE_SLOW << GPLL_MODE_SHIFT |
		     APLL_MODE_SLOW << APLL_MODE_SHIFT);

	/* init PLLs */
	rkclk_set_pll(cru, CLK_ARM, &apll_init_cfg);
	rkclk_set_pll(cru, CLK_GENERAL, &gpll_init_cfg);

	/*
	 * select apll as cpu/core clock pll source and
	 * set up dependent divisors for PERI and ACLK clocks.
	 * core hz : apll = 1:1
	 */
	aclk_div = APLL_HZ / CORE_ACLK_HZ - 1;
	assert((aclk_div + 1) * CORE_ACLK_HZ == APLL_HZ && aclk_div < 0x7);

	pclk_div = APLL_HZ / CORE_PERI_HZ - 1;
	assert((pclk_div + 1) * CORE_PERI_HZ == APLL_HZ && pclk_div < 0xf);

	rk_clrsetreg(&cru->cru_clksel_con[0],
		     CORE_CLK_PLL_SEL_MASK | CORE_DIV_CON_MASK,
		     CORE_CLK_PLL_SEL_APLL << CORE_CLK_PLL_SEL_SHIFT |
		     0 << CORE_DIV_CON_SHIFT);

	rk_clrsetreg(&cru->cru_clksel_con[1],
		     CORE_ACLK_DIV_MASK | CORE_PERI_DIV_MASK,
		     aclk_div << CORE_ACLK_DIV_SHIFT |
		     pclk_div << CORE_PERI_DIV_SHIFT);

	/*
	 * select gpll as pd_bus bus clock source and
	 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
	 */
	aclk_div = GPLL_HZ / BUS_ACLK_HZ - 1;
	assert((aclk_div + 1) * BUS_ACLK_HZ == GPLL_HZ && aclk_div <= 0x1f);

	pclk_div = BUS_ACLK_HZ / BUS_PCLK_HZ - 1;
	assert((pclk_div + 1) * BUS_PCLK_HZ == BUS_ACLK_HZ && pclk_div <= 0x7);

	hclk_div = BUS_ACLK_HZ / BUS_HCLK_HZ - 1;
	assert((hclk_div + 1) * BUS_HCLK_HZ == BUS_ACLK_HZ && hclk_div <= 0x3);

	rk_clrsetreg(&cru->cru_clksel_con[0],
		     BUS_ACLK_PLL_SEL_MASK | BUS_ACLK_DIV_MASK,
		     BUS_ACLK_PLL_SEL_GPLL << BUS_ACLK_PLL_SEL_SHIFT |
		     aclk_div << BUS_ACLK_DIV_SHIFT);

	rk_clrsetreg(&cru->cru_clksel_con[1],
		     BUS_PCLK_DIV_MASK | BUS_HCLK_DIV_MASK,
		     pclk_div << BUS_PCLK_DIV_SHIFT |
		     hclk_div << BUS_HCLK_DIV_SHIFT);

	/*
	 * select gpll as pd_peri bus clock source and
	 * set up dependent divisors for PCLK/HCLK and ACLK clocks.
	 */
	aclk_div = GPLL_HZ / PERI_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERI_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = ilog2(PERI_ACLK_HZ / PERI_HCLK_HZ);
	assert((1 << hclk_div) * PERI_HCLK_HZ == PERI_ACLK_HZ &&
	       hclk_div < 0x4);

	pclk_div = ilog2(PERI_ACLK_HZ / PERI_PCLK_HZ);
	assert((1 << pclk_div) * PERI_PCLK_HZ == PERI_ACLK_HZ &&
	       pclk_div < 0x8);

	rk_clrsetreg(&cru->cru_clksel_con[10],
		     PERI_PLL_SEL_MASK | PERI_PCLK_DIV_MASK |
		     PERI_HCLK_DIV_MASK | PERI_ACLK_DIV_MASK,
		     PERI_PLL_GPLL << PERI_PLL_SEL_SHIFT |
		     pclk_div << PERI_PCLK_DIV_SHIFT |
		     hclk_div << PERI_HCLK_DIV_SHIFT |
		     aclk_div << PERI_ACLK_DIV_SHIFT);

	/* PLLs enter normal mode */
	rk_clrsetreg(&cru->cru_mode_con,
		     GPLL_MODE_MASK | APLL_MODE_MASK | CPLL_MODE_MASK,
		     GPLL_MODE_NORM << GPLL_MODE_SHIFT |
		     APLL_MODE_NORM << APLL_MODE_SHIFT |
		     CPLL_MODE_NORM << CPLL_MODE_SHIFT);

	/* limit the NAND controller working clock to a maximum of 150 MHz */
	rk_clrsetreg(&cru->cru_clksel_con[2],
		     NANDC_PLL_SEL_MASK | NANDC_CLK_DIV_MASK,
		     NANDC_PLL_SEL_GPLL << NANDC_PLL_SEL_SHIFT |
		     3 << NANDC_CLK_DIV_SHIFT);
}

/* Get pll rate by id */
static u32 rkclk_pll_get_rate(struct rk3128_cru *cru,
			      enum rk_clk_id clk_id)
{
	u32 refdiv, fbdiv, postdiv1, postdiv2;
	u32 con;
	int pll_id = rk_pll_id(clk_id);
	struct rk3128_pll *pll = &cru->pll[pll_id];
	static u8 clk_shift[CLK_COUNT] = {
		0xff, APLL_MODE_SHIFT, DPLL_MODE_SHIFT, CPLL_MODE_SHIFT,
		GPLL_MODE_SHIFT, 0xff
	};
	static u32 clk_mask[CLK_COUNT] = {
		0xff, APLL_MODE_MASK, DPLL_MODE_MASK, CPLL_MODE_MASK,
		GPLL_MODE_MASK, 0xff
	};
	uint shift;
	uint mask;

	con = readl(&cru->cru_mode_con);
	shift = clk_shift[clk_id];
	mask = clk_mask[clk_id];

	switch ((con & mask) >> shift) {
	case GPLL_MODE_SLOW:
		return OSC_HZ;
	case GPLL_MODE_NORM:
		/* normal mode */
		con = readl(&pll->con0);
		postdiv1 = (con & PLL_POSTDIV1_MASK) >> PLL_POSTDIV1_SHIFT;
		fbdiv = (con & PLL_FBDIV_MASK) >> PLL_FBDIV_SHIFT;
		con = readl(&pll->con1);
		postdiv2 = (con & PLL_POSTDIV2_MASK) >> PLL_POSTDIV2_SHIFT;
		refdiv = (con & PLL_REFDIV_MASK) >> PLL_REFDIV_SHIFT;
		return (24 * fbdiv / (refdiv * postdiv1 * postdiv2)) * 1000000;
	case GPLL_MODE_DEEP:
	default:
		return 32768;
	}
}

static ulong rockchip_mmc_get_clk(struct rk3128_cru *cru, uint clk_general_rate,
				  int periph)
{
	uint src_rate;
	uint div, mux;
	u32 con;

	switch (periph) {
	case HCLK_EMMC:
	case SCLK_EMMC:
	case SCLK_EMMC_SAMPLE:
		con = readl(&cru->cru_clksel_con[12]);
		mux = (con & EMMC_PLL_MASK) >> EMMC_PLL_SHIFT;
		div = (con & EMMC_DIV_MASK) >> EMMC_DIV_SHIFT;
		break;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		con = readl(&cru->cru_clksel_con[11]);
		mux = (con & MMC0_PLL_MASK) >> MMC0_PLL_SHIFT;
		div = (con & MMC0_DIV_MASK) >> MMC0_DIV_SHIFT;
		break;
	default:
		return -EINVAL;
	}

	src_rate = mux == EMMC_SEL_24M ? OSC_HZ : clk_general_rate;
	return DIV_TO_RATE(src_rate, div);
}

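/*
 * Pick GPLL or the 24 MHz OSC as the MMC clock parent and program the
 * divider. The controller divides the clock by two internally, so the CRU
 * is asked for twice the requested rate.
 */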
static ulong rockchip_mmc_set_clk(struct rk3128_cru *cru, uint clk_general_rate,
				  int periph, uint freq)
{
	int src_clk_div;
	int mux;

	debug("%s: clk_general_rate=%u\n", __func__, clk_general_rate);

	/* mmc clock is divided by 2 internally, so the cru must provide double */
	src_clk_div = DIV_ROUND_UP(clk_general_rate / 2, freq);

	if (src_clk_div > 128) {
		src_clk_div = DIV_ROUND_UP(OSC_HZ / 2, freq);
		mux = EMMC_SEL_24M;
	} else {
		mux = EMMC_SEL_GPLL;
	}

	switch (periph) {
	case HCLK_EMMC:
		rk_clrsetreg(&cru->cru_clksel_con[12],
			     EMMC_PLL_MASK | EMMC_DIV_MASK,
			     mux << EMMC_PLL_SHIFT |
			     (src_clk_div - 1) << EMMC_DIV_SHIFT);
		break;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		rk_clrsetreg(&cru->cru_clksel_con[11],
			     MMC0_PLL_MASK | MMC0_DIV_MASK,
			     mux << MMC0_PLL_SHIFT |
			     (src_clk_div - 1) << MMC0_DIV_SHIFT);
		break;
	default:
		return -EINVAL;
	}

	return rockchip_mmc_get_clk(cru, clk_general_rate, periph);
}

static ulong rk3128_peri_get_pclk(struct rk3128_cru *cru, ulong clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		con = readl(&cru->cru_clksel_con[10]);
		div = con >> 12 & 0x3;
		break;
	default:
		printf("unsupported peripheral bus clock\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PERI_ACLK_HZ, div);
}

static ulong rk3128_peri_set_pclk(struct rk3128_cru *cru, ulong clk_id, uint hz)
{
	int src_clk_div;

	src_clk_div = PERI_ACLK_HZ / hz;
	assert(src_clk_div - 1 < 4);

	switch (clk_id) {
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		rk_setreg(&cru->cru_clksel_con[10],
			  ((src_clk_div - 1) << 12));
		break;
	default:
		printf("unsupported peripheral bus clock\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PERI_ACLK_HZ, src_clk_div);
}

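/* The SARADC is clocked from the 24 MHz OSC through the divider in clksel_con[24] */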
static ulong rk3128_saradc_get_clk(struct rk3128_cru *cru)
{
	u32 div, val;

	val = readl(&cru->cru_clksel_con[24]);
	div = bitfield_extract(val, SARADC_DIV_CON_SHIFT,
			       SARADC_DIV_CON_WIDTH);

	return DIV_TO_RATE(OSC_HZ, div);
}

static ulong rk3128_saradc_set_clk(struct rk3128_cru *cru, uint hz)
{
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
	assert(src_clk_div < 128);

	rk_clrsetreg(&cru->cru_clksel_con[24],
		     SARADC_DIV_CON_MASK,
		     src_clk_div << SARADC_DIV_CON_SHIFT);

	return rk3128_saradc_get_clk(cru);
}

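/*
 * ACLK_VIO0/1 are divided down from GPLL; DCLK_VOP reprograms the CPLL via
 * pll_para_config() so the pixel clock can be hit exactly (divider of 1).
 */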
static ulong rk3128_vop_set_clk(struct rk3128_cru *cru, ulong clk_id, uint hz)
{
	int src_clk_div;
	struct pll_div cpll_config = {0};

	src_clk_div = GPLL_HZ / hz;
	assert(src_clk_div - 1 < 31);

	switch (clk_id) {
	case ACLK_VIO0:
		rk_clrsetreg(&cru->cru_clksel_con[31],
			     VIO0_PLL_MASK | VIO0_DIV_MASK,
			     VIO0_SEL_GPLL << VIO0_PLL_SHIFT |
			     (src_clk_div - 1) << VIO0_DIV_SHIFT);
		break;
	case ACLK_VIO1:
		rk_clrsetreg(&cru->cru_clksel_con[31],
			     VIO1_PLL_MASK | VIO1_DIV_MASK,
			     VIO1_SEL_GPLL << VIO1_PLL_SHIFT |
			     (src_clk_div - 1) << VIO1_DIV_SHIFT);
		break;
	case DCLK_VOP:
		if (pll_para_config(hz, &cpll_config))
			return -1;
		rkclk_set_pll(cru, CLK_CODEC, &cpll_config);

		rk_clrsetreg(&cru->cru_clksel_con[27],
			     DCLK_VOP_SEL_MASK | DCLK_VOP_DIV_CON_MASK,
			     DCLK_VOP_PLL_SEL_CPLL << DCLK_VOP_SEL_SHIFT |
			     (1 - 1) << DCLK_VOP_DIV_CON_SHIFT);
		break;
	default:
		printf("unsupported VOP clock\n");
		return -EINVAL;
	}

	return hz;
}

static ulong rk3128_vop_get_rate(struct rk3128_cru *cru, ulong clk_id)
{
	u32 div, con, parent;

	switch (clk_id) {
	case ACLK_VIO0:
		con = readl(&cru->cru_clksel_con[31]);
		div = con & 0x1f;
		parent = GPLL_HZ;
		break;
	case ACLK_VIO1:
		con = readl(&cru->cru_clksel_con[31]);
		div = (con >> 8) & 0x1f;
		parent = GPLL_HZ;
		break;
	case DCLK_VOP:
		con = readl(&cru->cru_clksel_con[27]);
		div = (con >> 8) & 0xfff;
		parent = rkclk_pll_get_rate(cru, CLK_CODEC);
		break;
	default:
		return -ENOENT;
	}
	return DIV_TO_RATE(parent, div);
}

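/*
 * Clock IDs below 64 are passed straight to rkclk_pll_get_rate() as PLL
 * identifiers; everything else is dispatched per peripheral.
 */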
static ulong rk3128_clk_get_rate(struct clk *clk)
{
	struct rk3128_clk_priv *priv = dev_get_priv(clk->dev);

	switch (clk->id) {
	case 0 ... 63:
		return rkclk_pll_get_rate(priv->cru, clk->id);
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		return rk3128_peri_get_pclk(priv->cru, clk->id);
	case SCLK_SARADC:
		return rk3128_saradc_get_clk(priv->cru);
	case DCLK_VOP:
	case ACLK_VIO0:
	case ACLK_VIO1:
		return rk3128_vop_get_rate(priv->cru, clk->id);
	default:
		return -ENOENT;
	}
}

static ulong rk3128_clk_set_rate(struct clk *clk, ulong rate)
{
	struct rk3128_clk_priv *priv = dev_get_priv(clk->dev);
	ulong new_rate, gclk_rate;

	gclk_rate = rkclk_pll_get_rate(priv->cru, CLK_GENERAL);
	switch (clk->id) {
	case 0 ... 63:
		return 0;
	case DCLK_VOP:
	case ACLK_VIO0:
	case ACLK_VIO1:
		new_rate = rk3128_vop_set_clk(priv->cru, clk->id, rate);
		break;
	case HCLK_EMMC:
		new_rate = rockchip_mmc_set_clk(priv->cru, gclk_rate,
						clk->id, rate);
		break;
	case PCLK_I2C0:
	case PCLK_I2C1:
	case PCLK_I2C2:
	case PCLK_I2C3:
	case PCLK_PWM:
		new_rate = rk3128_peri_set_pclk(priv->cru, clk->id, rate);
		break;
	case SCLK_SARADC:
		new_rate = rk3128_saradc_set_clk(priv->cru, rate);
		break;
	default:
		return -ENOENT;
	}

	return new_rate;
}

static struct clk_ops rk3128_clk_ops = {
	.get_rate = rk3128_clk_get_rate,
	.set_rate = rk3128_clk_set_rate,
};

static int rk3128_clk_of_to_plat(struct udevice *dev)
{
	struct rk3128_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);

	return 0;
}

static int rk3128_clk_probe(struct udevice *dev)
{
	struct rk3128_clk_priv *priv = dev_get_priv(dev);

	rkclk_init(priv->cru);

	return 0;
}

static int rk3128_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rk3128_cru,
						    cru_glb_srst_fst_value);
		priv->glb_srst_snd_value = offsetof(struct rk3128_cru,
						    cru_glb_srst_snd_value);
		dev_set_priv(sys_child, priv);
	}

	return 0;
}

static const struct udevice_id rk3128_clk_ids[] = {
	{ .compatible = "rockchip,rk3128-cru" },
	{ .compatible = "rockchip,rk3126-cru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3128_cru) = {
	.name = "clk_rk3128",
	.id = UCLASS_CLK,
	.of_match = rk3128_clk_ids,
	.priv_auto = sizeof(struct rk3128_clk_priv),
	.of_to_plat = rk3128_clk_of_to_plat,
	.ops = &rk3128_clk_ops,
	.bind = rk3128_clk_bind,
	.probe = rk3128_clk_probe,
};