Tom Rini10e47792018-05-06 17:58:06 -04001// SPDX-License-Identifier: GPL-2.0
Kever Yangca19eac2016-07-29 10:35:25 +08002/*
3 * (C) Copyright 2015 Google, Inc
Philipp Tomsichc31ee922017-04-20 22:05:49 +02004 * (C) 2017 Theobroma Systems Design und Consulting GmbH
Kever Yangca19eac2016-07-29 10:35:25 +08005 */
6
7#include <common.h>
8#include <clk-uclass.h>
9#include <dm.h>
Kever Yange1980532017-02-13 17:38:56 +080010#include <dt-structs.h>
Kever Yangca19eac2016-07-29 10:35:25 +080011#include <errno.h>
Kever Yange1980532017-02-13 17:38:56 +080012#include <mapmem.h>
Kever Yangca19eac2016-07-29 10:35:25 +080013#include <syscon.h>
David Wuf91b9b42017-09-20 14:38:58 +080014#include <bitfield.h>
Kever Yangca19eac2016-07-29 10:35:25 +080015#include <asm/io.h>
16#include <asm/arch/clock.h>
17#include <asm/arch/cru_rk3399.h>
18#include <asm/arch/hardware.h>
19#include <dm/lists.h>
20#include <dt-bindings/clock/rk3399-cru.h>
21
Kever Yange1980532017-02-13 17:38:56 +080022#if CONFIG_IS_ENABLED(OF_PLATDATA)
23struct rk3399_clk_plat {
24 struct dtd_rockchip_rk3399_cru dtd;
Kever Yange54d26a2016-08-12 17:47:15 +080025};
26
Kever Yange1980532017-02-13 17:38:56 +080027struct rk3399_pmuclk_plat {
28 struct dtd_rockchip_rk3399_pmucru dtd;
29};
30#endif
31
Kever Yangca19eac2016-07-29 10:35:25 +080032struct pll_div {
33 u32 refdiv;
34 u32 fbdiv;
35 u32 postdiv1;
36 u32 postdiv2;
37 u32 frac;
38};
39
#define RATE_TO_DIV(input_rate, output_rate) \
	((input_rate) / (output_rate) - 1)
42#define DIV_TO_RATE(input_rate, div) ((input_rate) / ((div) + 1))
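
/*
 * For illustration only: DIV_TO_RATE(600 * MHz, 2) evaluates to 200 MHz,
 * and RATE_TO_DIV(600 * MHz, 200 * MHz) evaluates back to a divider of 2.
 */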
43
#define PLL_DIVISORS(hz, _refdiv, _postdiv1, _postdiv2) {\
	.refdiv = _refdiv,\
	.fbdiv = (u32)((u64)hz * _refdiv * _postdiv1 * _postdiv2 / OSC_HZ),\
	.postdiv1 = _postdiv1, .postdiv2 = _postdiv2}
48
Philipp Tomsichcf0a4ba2017-03-24 19:24:24 +010049#if defined(CONFIG_SPL_BUILD)
Kever Yangca19eac2016-07-29 10:35:25 +080050static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2, 1);
51static const struct pll_div cpll_init_cfg = PLL_DIVISORS(CPLL_HZ, 1, 2, 2);
Philipp Tomsichcf0a4ba2017-03-24 19:24:24 +010052#else
Kever Yangca19eac2016-07-29 10:35:25 +080053static const struct pll_div ppll_init_cfg = PLL_DIVISORS(PPLL_HZ, 2, 2, 1);
Philipp Tomsichcf0a4ba2017-03-24 19:24:24 +010054#endif
Kever Yangca19eac2016-07-29 10:35:25 +080055
56static const struct pll_div apll_l_1600_cfg = PLL_DIVISORS(1600*MHz, 3, 1, 1);
57static const struct pll_div apll_l_600_cfg = PLL_DIVISORS(600*MHz, 1, 2, 1);
58
59static const struct pll_div *apll_l_cfgs[] = {
60 [APLL_L_1600_MHZ] = &apll_l_1600_cfg,
61 [APLL_L_600_MHZ] = &apll_l_600_cfg,
62};
63
Christoph Muellner25c7ba92018-11-30 20:32:48 +010064static const struct pll_div apll_b_600_cfg = PLL_DIVISORS(600*MHz, 1, 2, 1);
65static const struct pll_div *apll_b_cfgs[] = {
66 [APLL_B_600_MHZ] = &apll_b_600_cfg,
67};
68
Kever Yangca19eac2016-07-29 10:35:25 +080069enum {
70 /* PLL_CON0 */
71 PLL_FBDIV_MASK = 0xfff,
72 PLL_FBDIV_SHIFT = 0,
73
74 /* PLL_CON1 */
75 PLL_POSTDIV2_SHIFT = 12,
76 PLL_POSTDIV2_MASK = 0x7 << PLL_POSTDIV2_SHIFT,
77 PLL_POSTDIV1_SHIFT = 8,
78 PLL_POSTDIV1_MASK = 0x7 << PLL_POSTDIV1_SHIFT,
79 PLL_REFDIV_MASK = 0x3f,
80 PLL_REFDIV_SHIFT = 0,
81
82 /* PLL_CON2 */
83 PLL_LOCK_STATUS_SHIFT = 31,
84 PLL_LOCK_STATUS_MASK = 1 << PLL_LOCK_STATUS_SHIFT,
85 PLL_FRACDIV_MASK = 0xffffff,
86 PLL_FRACDIV_SHIFT = 0,
87
88 /* PLL_CON3 */
89 PLL_MODE_SHIFT = 8,
90 PLL_MODE_MASK = 3 << PLL_MODE_SHIFT,
91 PLL_MODE_SLOW = 0,
92 PLL_MODE_NORM,
93 PLL_MODE_DEEP,
94 PLL_DSMPD_SHIFT = 3,
95 PLL_DSMPD_MASK = 1 << PLL_DSMPD_SHIFT,
96 PLL_INTEGER_MODE = 1,
97
98 /* PMUCRU_CLKSEL_CON0 */
99 PMU_PCLK_DIV_CON_MASK = 0x1f,
100 PMU_PCLK_DIV_CON_SHIFT = 0,
101
102 /* PMUCRU_CLKSEL_CON1 */
103 SPI3_PLL_SEL_SHIFT = 7,
104 SPI3_PLL_SEL_MASK = 1 << SPI3_PLL_SEL_SHIFT,
105 SPI3_PLL_SEL_24M = 0,
106 SPI3_PLL_SEL_PPLL = 1,
107 SPI3_DIV_CON_SHIFT = 0x0,
108 SPI3_DIV_CON_MASK = 0x7f,
109
110 /* PMUCRU_CLKSEL_CON2 */
111 I2C_DIV_CON_MASK = 0x7f,
Kever Yange54d26a2016-08-12 17:47:15 +0800112 CLK_I2C8_DIV_CON_SHIFT = 8,
113 CLK_I2C0_DIV_CON_SHIFT = 0,
Kever Yangca19eac2016-07-29 10:35:25 +0800114
115 /* PMUCRU_CLKSEL_CON3 */
Kever Yange54d26a2016-08-12 17:47:15 +0800116 CLK_I2C4_DIV_CON_SHIFT = 0,
Kever Yangca19eac2016-07-29 10:35:25 +0800117
118 /* CLKSEL_CON0 */
119 ACLKM_CORE_L_DIV_CON_SHIFT = 8,
120 ACLKM_CORE_L_DIV_CON_MASK = 0x1f << ACLKM_CORE_L_DIV_CON_SHIFT,
121 CLK_CORE_L_PLL_SEL_SHIFT = 6,
122 CLK_CORE_L_PLL_SEL_MASK = 3 << CLK_CORE_L_PLL_SEL_SHIFT,
123 CLK_CORE_L_PLL_SEL_ALPLL = 0x0,
124 CLK_CORE_L_PLL_SEL_ABPLL = 0x1,
125 CLK_CORE_L_PLL_SEL_DPLL = 0x10,
126 CLK_CORE_L_PLL_SEL_GPLL = 0x11,
127 CLK_CORE_L_DIV_MASK = 0x1f,
128 CLK_CORE_L_DIV_SHIFT = 0,
129
130 /* CLKSEL_CON1 */
131 PCLK_DBG_L_DIV_SHIFT = 0x8,
132 PCLK_DBG_L_DIV_MASK = 0x1f << PCLK_DBG_L_DIV_SHIFT,
133 ATCLK_CORE_L_DIV_SHIFT = 0,
134 ATCLK_CORE_L_DIV_MASK = 0x1f << ATCLK_CORE_L_DIV_SHIFT,
135
Christoph Muellner25c7ba92018-11-30 20:32:48 +0100136 /* CLKSEL_CON2 */
137 ACLKM_CORE_B_DIV_CON_SHIFT = 8,
138 ACLKM_CORE_B_DIV_CON_MASK = 0x1f << ACLKM_CORE_B_DIV_CON_SHIFT,
139 CLK_CORE_B_PLL_SEL_SHIFT = 6,
140 CLK_CORE_B_PLL_SEL_MASK = 3 << CLK_CORE_B_PLL_SEL_SHIFT,
141 CLK_CORE_B_PLL_SEL_ALPLL = 0x0,
142 CLK_CORE_B_PLL_SEL_ABPLL = 0x1,
143 CLK_CORE_B_PLL_SEL_DPLL = 0x10,
144 CLK_CORE_B_PLL_SEL_GPLL = 0x11,
145 CLK_CORE_B_DIV_MASK = 0x1f,
146 CLK_CORE_B_DIV_SHIFT = 0,
147
148 /* CLKSEL_CON3 */
149 PCLK_DBG_B_DIV_SHIFT = 0x8,
150 PCLK_DBG_B_DIV_MASK = 0x1f << PCLK_DBG_B_DIV_SHIFT,
151 ATCLK_CORE_B_DIV_SHIFT = 0,
152 ATCLK_CORE_B_DIV_MASK = 0x1f << ATCLK_CORE_B_DIV_SHIFT,
153
Kever Yangca19eac2016-07-29 10:35:25 +0800154 /* CLKSEL_CON14 */
155 PCLK_PERIHP_DIV_CON_SHIFT = 12,
156 PCLK_PERIHP_DIV_CON_MASK = 0x7 << PCLK_PERIHP_DIV_CON_SHIFT,
157 HCLK_PERIHP_DIV_CON_SHIFT = 8,
158 HCLK_PERIHP_DIV_CON_MASK = 3 << HCLK_PERIHP_DIV_CON_SHIFT,
159 ACLK_PERIHP_PLL_SEL_SHIFT = 7,
160 ACLK_PERIHP_PLL_SEL_MASK = 1 << ACLK_PERIHP_PLL_SEL_SHIFT,
161 ACLK_PERIHP_PLL_SEL_CPLL = 0,
162 ACLK_PERIHP_PLL_SEL_GPLL = 1,
163 ACLK_PERIHP_DIV_CON_SHIFT = 0,
164 ACLK_PERIHP_DIV_CON_MASK = 0x1f,
165
166 /* CLKSEL_CON21 */
167 ACLK_EMMC_PLL_SEL_SHIFT = 7,
168 ACLK_EMMC_PLL_SEL_MASK = 0x1 << ACLK_EMMC_PLL_SEL_SHIFT,
169 ACLK_EMMC_PLL_SEL_GPLL = 0x1,
170 ACLK_EMMC_DIV_CON_SHIFT = 0,
171 ACLK_EMMC_DIV_CON_MASK = 0x1f,
172
173 /* CLKSEL_CON22 */
174 CLK_EMMC_PLL_SHIFT = 8,
175 CLK_EMMC_PLL_MASK = 0x7 << CLK_EMMC_PLL_SHIFT,
176 CLK_EMMC_PLL_SEL_GPLL = 0x1,
Kever Yangdc850de2016-08-04 11:44:58 +0800177 CLK_EMMC_PLL_SEL_24M = 0x5,
Kever Yangca19eac2016-07-29 10:35:25 +0800178 CLK_EMMC_DIV_CON_SHIFT = 0,
179 CLK_EMMC_DIV_CON_MASK = 0x7f << CLK_EMMC_DIV_CON_SHIFT,
180
181 /* CLKSEL_CON23 */
182 PCLK_PERILP0_DIV_CON_SHIFT = 12,
183 PCLK_PERILP0_DIV_CON_MASK = 0x7 << PCLK_PERILP0_DIV_CON_SHIFT,
184 HCLK_PERILP0_DIV_CON_SHIFT = 8,
185 HCLK_PERILP0_DIV_CON_MASK = 3 << HCLK_PERILP0_DIV_CON_SHIFT,
186 ACLK_PERILP0_PLL_SEL_SHIFT = 7,
187 ACLK_PERILP0_PLL_SEL_MASK = 1 << ACLK_PERILP0_PLL_SEL_SHIFT,
188 ACLK_PERILP0_PLL_SEL_CPLL = 0,
189 ACLK_PERILP0_PLL_SEL_GPLL = 1,
190 ACLK_PERILP0_DIV_CON_SHIFT = 0,
191 ACLK_PERILP0_DIV_CON_MASK = 0x1f,
192
193 /* CLKSEL_CON25 */
194 PCLK_PERILP1_DIV_CON_SHIFT = 8,
195 PCLK_PERILP1_DIV_CON_MASK = 0x7 << PCLK_PERILP1_DIV_CON_SHIFT,
196 HCLK_PERILP1_PLL_SEL_SHIFT = 7,
197 HCLK_PERILP1_PLL_SEL_MASK = 1 << HCLK_PERILP1_PLL_SEL_SHIFT,
198 HCLK_PERILP1_PLL_SEL_CPLL = 0,
199 HCLK_PERILP1_PLL_SEL_GPLL = 1,
200 HCLK_PERILP1_DIV_CON_SHIFT = 0,
201 HCLK_PERILP1_DIV_CON_MASK = 0x1f,
202
203 /* CLKSEL_CON26 */
204 CLK_SARADC_DIV_CON_SHIFT = 8,
David Wuf91b9b42017-09-20 14:38:58 +0800205 CLK_SARADC_DIV_CON_MASK = GENMASK(15, 8),
206 CLK_SARADC_DIV_CON_WIDTH = 8,
Kever Yangca19eac2016-07-29 10:35:25 +0800207
208 /* CLKSEL_CON27 */
209 CLK_TSADC_SEL_X24M = 0x0,
210 CLK_TSADC_SEL_SHIFT = 15,
211 CLK_TSADC_SEL_MASK = 1 << CLK_TSADC_SEL_SHIFT,
212 CLK_TSADC_DIV_CON_SHIFT = 0,
213 CLK_TSADC_DIV_CON_MASK = 0x3ff,
214
215 /* CLKSEL_CON47 & CLKSEL_CON48 */
216 ACLK_VOP_PLL_SEL_SHIFT = 6,
217 ACLK_VOP_PLL_SEL_MASK = 0x3 << ACLK_VOP_PLL_SEL_SHIFT,
218 ACLK_VOP_PLL_SEL_CPLL = 0x1,
219 ACLK_VOP_DIV_CON_SHIFT = 0,
220 ACLK_VOP_DIV_CON_MASK = 0x1f << ACLK_VOP_DIV_CON_SHIFT,
221
222 /* CLKSEL_CON49 & CLKSEL_CON50 */
223 DCLK_VOP_DCLK_SEL_SHIFT = 11,
224 DCLK_VOP_DCLK_SEL_MASK = 1 << DCLK_VOP_DCLK_SEL_SHIFT,
225 DCLK_VOP_DCLK_SEL_DIVOUT = 0,
226 DCLK_VOP_PLL_SEL_SHIFT = 8,
227 DCLK_VOP_PLL_SEL_MASK = 3 << DCLK_VOP_PLL_SEL_SHIFT,
228 DCLK_VOP_PLL_SEL_VPLL = 0,
229 DCLK_VOP_DIV_CON_MASK = 0xff,
230 DCLK_VOP_DIV_CON_SHIFT = 0,
231
232 /* CLKSEL_CON58 */
Philipp Tomsichc31ee922017-04-20 22:05:49 +0200233 CLK_SPI_PLL_SEL_WIDTH = 1,
	CLK_SPI_PLL_SEL_MASK		= ((1 << CLK_SPI_PLL_SEL_WIDTH) - 1),
235 CLK_SPI_PLL_SEL_CPLL = 0,
236 CLK_SPI_PLL_SEL_GPLL = 1,
237 CLK_SPI_PLL_DIV_CON_WIDTH = 7,
238 CLK_SPI_PLL_DIV_CON_MASK = ((1 << CLK_SPI_PLL_DIV_CON_WIDTH) - 1),
239
240 CLK_SPI5_PLL_DIV_CON_SHIFT = 8,
241 CLK_SPI5_PLL_SEL_SHIFT = 15,
Kever Yangca19eac2016-07-29 10:35:25 +0800242
243 /* CLKSEL_CON59 */
244 CLK_SPI1_PLL_SEL_SHIFT = 15,
245 CLK_SPI1_PLL_DIV_CON_SHIFT = 8,
246 CLK_SPI0_PLL_SEL_SHIFT = 7,
247 CLK_SPI0_PLL_DIV_CON_SHIFT = 0,
248
249 /* CLKSEL_CON60 */
250 CLK_SPI4_PLL_SEL_SHIFT = 15,
251 CLK_SPI4_PLL_DIV_CON_SHIFT = 8,
252 CLK_SPI2_PLL_SEL_SHIFT = 7,
253 CLK_SPI2_PLL_DIV_CON_SHIFT = 0,
254
255 /* CLKSEL_CON61 */
256 CLK_I2C_PLL_SEL_MASK = 1,
257 CLK_I2C_PLL_SEL_CPLL = 0,
258 CLK_I2C_PLL_SEL_GPLL = 1,
259 CLK_I2C5_PLL_SEL_SHIFT = 15,
260 CLK_I2C5_DIV_CON_SHIFT = 8,
261 CLK_I2C1_PLL_SEL_SHIFT = 7,
262 CLK_I2C1_DIV_CON_SHIFT = 0,
263
264 /* CLKSEL_CON62 */
265 CLK_I2C6_PLL_SEL_SHIFT = 15,
266 CLK_I2C6_DIV_CON_SHIFT = 8,
267 CLK_I2C2_PLL_SEL_SHIFT = 7,
268 CLK_I2C2_DIV_CON_SHIFT = 0,
269
270 /* CLKSEL_CON63 */
271 CLK_I2C7_PLL_SEL_SHIFT = 15,
272 CLK_I2C7_DIV_CON_SHIFT = 8,
273 CLK_I2C3_PLL_SEL_SHIFT = 7,
274 CLK_I2C3_DIV_CON_SHIFT = 0,
275
276 /* CRU_SOFTRST_CON4 */
277 RESETN_DDR0_REQ_SHIFT = 8,
278 RESETN_DDR0_REQ_MASK = 1 << RESETN_DDR0_REQ_SHIFT,
279 RESETN_DDRPHY0_REQ_SHIFT = 9,
280 RESETN_DDRPHY0_REQ_MASK = 1 << RESETN_DDRPHY0_REQ_SHIFT,
281 RESETN_DDR1_REQ_SHIFT = 12,
282 RESETN_DDR1_REQ_MASK = 1 << RESETN_DDR1_REQ_SHIFT,
283 RESETN_DDRPHY1_REQ_SHIFT = 13,
284 RESETN_DDRPHY1_REQ_MASK = 1 << RESETN_DDRPHY1_REQ_SHIFT,
285};
286
287#define VCO_MAX_KHZ (3200 * (MHz / KHz))
288#define VCO_MIN_KHZ (800 * (MHz / KHz))
289#define OUTPUT_MAX_KHZ (3200 * (MHz / KHz))
290#define OUTPUT_MIN_KHZ (16 * (MHz / KHz))
291
/*
 * The divisor restrictions of the PLL in integer mode: these are defined in
 * CRU_*PLL_CON0 or PMUCRU_*PLL_CON0
 */
296#define PLL_DIV_MIN 16
297#define PLL_DIV_MAX 3200
298
299/*
300 * How to calculate the PLL(from TRM V0.3 Part 1 Page 63):
301 * Formulas also embedded within the Fractional PLL Verilog model:
302 * If DSMPD = 1 (DSM is disabled, "integer mode")
303 * FOUTVCO = FREF / REFDIV * FBDIV
304 * FOUTPOSTDIV = FOUTVCO / POSTDIV1 / POSTDIV2
305 * Where:
306 * FOUTVCO = Fractional PLL non-divided output frequency
307 * FOUTPOSTDIV = Fractional PLL divided output frequency
308 * (output of second post divider)
309 * FREF = Fractional PLL input reference frequency, (the OSC_HZ 24MHz input)
310 * REFDIV = Fractional PLL input reference clock divider
311 * FBDIV = Integer value programmed into feedback divide
312 *
313 */
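/*
 * Worked example (a sketch only, using the gpll_init_cfg divisors above,
 * i.e. refdiv = 2, postdiv1 = 2, postdiv2 = 1, and the 24 MHz OSC_HZ input):
 *   FBDIV       = GPLL_HZ * 2 * 2 * 1 / 24 MHz      (from PLL_DIVISORS)
 *   FOUTVCO     = 24 MHz / 2 * FBDIV = 2 * GPLL_HZ
 *   FOUTPOSTDIV = FOUTVCO / 2 / 1    = GPLL_HZ
 */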
314static void rkclk_set_pll(u32 *pll_con, const struct pll_div *div)
315{
316 /* All 8 PLLs have same VCO and output frequency range restrictions. */
317 u32 vco_khz = OSC_HZ / 1000 * div->fbdiv / div->refdiv;
318 u32 output_khz = vco_khz / div->postdiv1 / div->postdiv2;
319
320 debug("PLL at %p: fbdiv=%d, refdiv=%d, postdiv1=%d, "
321 "postdiv2=%d, vco=%u khz, output=%u khz\n",
322 pll_con, div->fbdiv, div->refdiv, div->postdiv1,
323 div->postdiv2, vco_khz, output_khz);
324 assert(vco_khz >= VCO_MIN_KHZ && vco_khz <= VCO_MAX_KHZ &&
325 output_khz >= OUTPUT_MIN_KHZ && output_khz <= OUTPUT_MAX_KHZ &&
326 div->fbdiv >= PLL_DIV_MIN && div->fbdiv <= PLL_DIV_MAX);
327
	/*
	 * At power-on, and whenever the PLL setting is changed, the PLL must
	 * be forced into slow mode to guarantee a stable output clock.
	 */
332 rk_clrsetreg(&pll_con[3], PLL_MODE_MASK,
333 PLL_MODE_SLOW << PLL_MODE_SHIFT);
334
335 /* use integer mode */
336 rk_clrsetreg(&pll_con[3], PLL_DSMPD_MASK,
337 PLL_INTEGER_MODE << PLL_DSMPD_SHIFT);
338
339 rk_clrsetreg(&pll_con[0], PLL_FBDIV_MASK,
340 div->fbdiv << PLL_FBDIV_SHIFT);
341 rk_clrsetreg(&pll_con[1],
342 PLL_POSTDIV2_MASK | PLL_POSTDIV1_MASK |
		     PLL_REFDIV_MASK,
344 (div->postdiv2 << PLL_POSTDIV2_SHIFT) |
345 (div->postdiv1 << PLL_POSTDIV1_SHIFT) |
346 (div->refdiv << PLL_REFDIV_SHIFT));
347
348 /* waiting for pll lock */
349 while (!(readl(&pll_con[2]) & (1 << PLL_LOCK_STATUS_SHIFT)))
350 udelay(1);
351
352 /* pll enter normal mode */
353 rk_clrsetreg(&pll_con[3], PLL_MODE_MASK,
354 PLL_MODE_NORM << PLL_MODE_SHIFT);
355}
356
357static int pll_para_config(u32 freq_hz, struct pll_div *div)
358{
359 u32 ref_khz = OSC_HZ / KHz, refdiv, fbdiv = 0;
360 u32 postdiv1, postdiv2 = 1;
361 u32 fref_khz;
362 u32 diff_khz, best_diff_khz;
363 const u32 max_refdiv = 63, max_fbdiv = 3200, min_fbdiv = 16;
364 const u32 max_postdiv1 = 7, max_postdiv2 = 7;
365 u32 vco_khz;
366 u32 freq_khz = freq_hz / KHz;
367
368 if (!freq_hz) {
369 printf("%s: the frequency can't be 0 Hz\n", __func__);
370 return -1;
371 }
372
373 postdiv1 = DIV_ROUND_UP(VCO_MIN_KHZ, freq_khz);
374 if (postdiv1 > max_postdiv1) {
375 postdiv2 = DIV_ROUND_UP(postdiv1, max_postdiv1);
376 postdiv1 = DIV_ROUND_UP(postdiv1, postdiv2);
377 }
378
379 vco_khz = freq_khz * postdiv1 * postdiv2;
380
381 if (vco_khz < VCO_MIN_KHZ || vco_khz > VCO_MAX_KHZ ||
382 postdiv2 > max_postdiv2) {
383 printf("%s: Cannot find out a supported VCO"
384 " for Frequency (%uHz).\n", __func__, freq_hz);
385 return -1;
386 }
387
388 div->postdiv1 = postdiv1;
389 div->postdiv2 = postdiv2;
390
391 best_diff_khz = vco_khz;
392 for (refdiv = 1; refdiv < max_refdiv && best_diff_khz; refdiv++) {
393 fref_khz = ref_khz / refdiv;
394
395 fbdiv = vco_khz / fref_khz;
396 if ((fbdiv >= max_fbdiv) || (fbdiv <= min_fbdiv))
397 continue;
398 diff_khz = vco_khz - fbdiv * fref_khz;
399 if (fbdiv + 1 < max_fbdiv && diff_khz > fref_khz / 2) {
400 fbdiv++;
401 diff_khz = fref_khz - diff_khz;
402 }
403
404 if (diff_khz >= best_diff_khz)
405 continue;
406
407 best_diff_khz = diff_khz;
408 div->refdiv = refdiv;
409 div->fbdiv = fbdiv;
410 }
411
412 if (best_diff_khz > 4 * (MHz/KHz)) {
413 printf("%s: Failed to match output frequency %u, "
414 "difference is %u Hz,exceed 4MHZ\n", __func__, freq_hz,
415 best_diff_khz * KHz);
416 return -1;
417 }
418 return 0;
419}
420
Christoph Muellner25c7ba92018-11-30 20:32:48 +0100421void rk3399_configure_cpu_l(struct rk3399_cru *cru,
422 enum apll_l_frequencies apll_l_freq)
Kever Yangca19eac2016-07-29 10:35:25 +0800423{
424 u32 aclkm_div;
425 u32 pclk_dbg_div;
426 u32 atclk_div;
427
Christoph Muellner25c7ba92018-11-30 20:32:48 +0100428 /* Setup cluster L */
Kever Yangca19eac2016-07-29 10:35:25 +0800429 rkclk_set_pll(&cru->apll_l_con[0], apll_l_cfgs[apll_l_freq]);
430
Christoph Muellner25c7ba92018-11-30 20:32:48 +0100431 aclkm_div = LPLL_HZ / ACLKM_CORE_L_HZ - 1;
432 assert((aclkm_div + 1) * ACLKM_CORE_L_HZ == LPLL_HZ &&
Kever Yangca19eac2016-07-29 10:35:25 +0800433 aclkm_div < 0x1f);
434
Christoph Muellner25c7ba92018-11-30 20:32:48 +0100435 pclk_dbg_div = LPLL_HZ / PCLK_DBG_L_HZ - 1;
436 assert((pclk_dbg_div + 1) * PCLK_DBG_L_HZ == LPLL_HZ &&
Kever Yangca19eac2016-07-29 10:35:25 +0800437 pclk_dbg_div < 0x1f);
438
Christoph Muellner25c7ba92018-11-30 20:32:48 +0100439 atclk_div = LPLL_HZ / ATCLK_CORE_L_HZ - 1;
440 assert((atclk_div + 1) * ATCLK_CORE_L_HZ == LPLL_HZ &&
Kever Yangca19eac2016-07-29 10:35:25 +0800441 atclk_div < 0x1f);
442
443 rk_clrsetreg(&cru->clksel_con[0],
444 ACLKM_CORE_L_DIV_CON_MASK | CLK_CORE_L_PLL_SEL_MASK |
445 CLK_CORE_L_DIV_MASK,
446 aclkm_div << ACLKM_CORE_L_DIV_CON_SHIFT |
447 CLK_CORE_L_PLL_SEL_ALPLL << CLK_CORE_L_PLL_SEL_SHIFT |
448 0 << CLK_CORE_L_DIV_SHIFT);
449
450 rk_clrsetreg(&cru->clksel_con[1],
451 PCLK_DBG_L_DIV_MASK | ATCLK_CORE_L_DIV_MASK,
452 pclk_dbg_div << PCLK_DBG_L_DIV_SHIFT |
453 atclk_div << ATCLK_CORE_L_DIV_SHIFT);
454}
Christoph Muellner25c7ba92018-11-30 20:32:48 +0100455
456void rk3399_configure_cpu_b(struct rk3399_cru *cru,
457 enum apll_b_frequencies apll_b_freq)
458{
459 u32 aclkm_div;
460 u32 pclk_dbg_div;
461 u32 atclk_div;
462
463 /* Setup cluster B */
464 rkclk_set_pll(&cru->apll_b_con[0], apll_b_cfgs[apll_b_freq]);
465
466 aclkm_div = BPLL_HZ / ACLKM_CORE_B_HZ - 1;
467 assert((aclkm_div + 1) * ACLKM_CORE_B_HZ == BPLL_HZ &&
468 aclkm_div < 0x1f);
469
470 pclk_dbg_div = BPLL_HZ / PCLK_DBG_B_HZ - 1;
471 assert((pclk_dbg_div + 1) * PCLK_DBG_B_HZ == BPLL_HZ &&
472 pclk_dbg_div < 0x1f);
473
474 atclk_div = BPLL_HZ / ATCLK_CORE_B_HZ - 1;
475 assert((atclk_div + 1) * ATCLK_CORE_B_HZ == BPLL_HZ &&
476 atclk_div < 0x1f);
477
478 rk_clrsetreg(&cru->clksel_con[2],
479 ACLKM_CORE_B_DIV_CON_MASK | CLK_CORE_B_PLL_SEL_MASK |
480 CLK_CORE_B_DIV_MASK,
481 aclkm_div << ACLKM_CORE_B_DIV_CON_SHIFT |
482 CLK_CORE_B_PLL_SEL_ABPLL << CLK_CORE_B_PLL_SEL_SHIFT |
483 0 << CLK_CORE_B_DIV_SHIFT);
484
485 rk_clrsetreg(&cru->clksel_con[3],
486 PCLK_DBG_B_DIV_MASK | ATCLK_CORE_B_DIV_MASK,
487 pclk_dbg_div << PCLK_DBG_B_DIV_SHIFT |
488 atclk_div << ATCLK_CORE_B_DIV_SHIFT);
489}
490
Kever Yangca19eac2016-07-29 10:35:25 +0800491#define I2C_CLK_REG_MASK(bus) \
492 (I2C_DIV_CON_MASK << \
493 CLK_I2C ##bus## _DIV_CON_SHIFT | \
494 CLK_I2C_PLL_SEL_MASK << \
495 CLK_I2C ##bus## _PLL_SEL_SHIFT)
496
497#define I2C_CLK_REG_VALUE(bus, clk_div) \
498 ((clk_div - 1) << \
499 CLK_I2C ##bus## _DIV_CON_SHIFT | \
500 CLK_I2C_PLL_SEL_GPLL << \
501 CLK_I2C ##bus## _PLL_SEL_SHIFT)
502
#define I2C_CLK_DIV_VALUE(con, bus) \
	(((con) >> CLK_I2C ##bus## _DIV_CON_SHIFT) & \
	I2C_DIV_CON_MASK)
506
Kever Yange54d26a2016-08-12 17:47:15 +0800507#define I2C_PMUCLK_REG_MASK(bus) \
508 (I2C_DIV_CON_MASK << \
509 CLK_I2C ##bus## _DIV_CON_SHIFT)
510
511#define I2C_PMUCLK_REG_VALUE(bus, clk_div) \
512 ((clk_div - 1) << \
513 CLK_I2C ##bus## _DIV_CON_SHIFT)
514
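/*
 * For illustration (a sketch only): I2C_CLK_REG_VALUE(1, 4) selects GPLL as
 * the parent of SCLK_I2C1 and programs a divider field of 4 - 1 = 3, so the
 * resulting bus clock is GPLL_HZ / 4.
 */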
Kever Yangca19eac2016-07-29 10:35:25 +0800515static ulong rk3399_i2c_get_clk(struct rk3399_cru *cru, ulong clk_id)
516{
517 u32 div, con;
518
519 switch (clk_id) {
520 case SCLK_I2C1:
521 con = readl(&cru->clksel_con[61]);
522 div = I2C_CLK_DIV_VALUE(con, 1);
523 break;
524 case SCLK_I2C2:
525 con = readl(&cru->clksel_con[62]);
526 div = I2C_CLK_DIV_VALUE(con, 2);
527 break;
528 case SCLK_I2C3:
529 con = readl(&cru->clksel_con[63]);
530 div = I2C_CLK_DIV_VALUE(con, 3);
531 break;
532 case SCLK_I2C5:
533 con = readl(&cru->clksel_con[61]);
534 div = I2C_CLK_DIV_VALUE(con, 5);
535 break;
536 case SCLK_I2C6:
537 con = readl(&cru->clksel_con[62]);
538 div = I2C_CLK_DIV_VALUE(con, 6);
539 break;
540 case SCLK_I2C7:
541 con = readl(&cru->clksel_con[63]);
542 div = I2C_CLK_DIV_VALUE(con, 7);
543 break;
544 default:
545 printf("do not support this i2c bus\n");
546 return -EINVAL;
547 }
548
549 return DIV_TO_RATE(GPLL_HZ, div);
550}
551
552static ulong rk3399_i2c_set_clk(struct rk3399_cru *cru, ulong clk_id, uint hz)
553{
554 int src_clk_div;
555
	/* i2c0/4/8 take their source clock from PPLL; i2c1/2/3/5/6/7 from GPLL */
557 src_clk_div = GPLL_HZ / hz;
558 assert(src_clk_div - 1 < 127);
559
560 switch (clk_id) {
561 case SCLK_I2C1:
562 rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(1),
563 I2C_CLK_REG_VALUE(1, src_clk_div));
564 break;
565 case SCLK_I2C2:
566 rk_clrsetreg(&cru->clksel_con[62], I2C_CLK_REG_MASK(2),
567 I2C_CLK_REG_VALUE(2, src_clk_div));
568 break;
569 case SCLK_I2C3:
570 rk_clrsetreg(&cru->clksel_con[63], I2C_CLK_REG_MASK(3),
571 I2C_CLK_REG_VALUE(3, src_clk_div));
572 break;
573 case SCLK_I2C5:
574 rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(5),
575 I2C_CLK_REG_VALUE(5, src_clk_div));
576 break;
577 case SCLK_I2C6:
578 rk_clrsetreg(&cru->clksel_con[62], I2C_CLK_REG_MASK(6),
579 I2C_CLK_REG_VALUE(6, src_clk_div));
580 break;
581 case SCLK_I2C7:
582 rk_clrsetreg(&cru->clksel_con[63], I2C_CLK_REG_MASK(7),
583 I2C_CLK_REG_VALUE(7, src_clk_div));
584 break;
585 default:
586 printf("do not support this i2c bus\n");
587 return -EINVAL;
588 }
589
Philipp Tomsich30d7c152017-04-20 22:05:50 +0200590 return rk3399_i2c_get_clk(cru, clk_id);
Kever Yangca19eac2016-07-29 10:35:25 +0800591}
592
Philipp Tomsichc31ee922017-04-20 22:05:49 +0200593/*
594 * RK3399 SPI clocks have a common divider-width (7 bits) and a single bit
595 * to select either CPLL or GPLL as the clock-parent. The location within
596 * the enclosing CLKSEL_CON (i.e. div_shift and sel_shift) are variable.
597 */
598
599struct spi_clkreg {
600 uint8_t reg; /* CLKSEL_CON[reg] register in CRU */
601 uint8_t div_shift;
602 uint8_t sel_shift;
603};
604
605/*
606 * The entries are numbered relative to their offset from SCLK_SPI0.
607 *
 * Note that SCLK_SPI3 (which is configured via the PMUCRU and requires
 * different logic) is not supported.
610 */
611static const struct spi_clkreg spi_clkregs[] = {
612 [0] = { .reg = 59,
613 .div_shift = CLK_SPI0_PLL_DIV_CON_SHIFT,
614 .sel_shift = CLK_SPI0_PLL_SEL_SHIFT, },
615 [1] = { .reg = 59,
616 .div_shift = CLK_SPI1_PLL_DIV_CON_SHIFT,
617 .sel_shift = CLK_SPI1_PLL_SEL_SHIFT, },
618 [2] = { .reg = 60,
619 .div_shift = CLK_SPI2_PLL_DIV_CON_SHIFT,
620 .sel_shift = CLK_SPI2_PLL_SEL_SHIFT, },
621 [3] = { .reg = 60,
622 .div_shift = CLK_SPI4_PLL_DIV_CON_SHIFT,
623 .sel_shift = CLK_SPI4_PLL_SEL_SHIFT, },
624 [4] = { .reg = 58,
625 .div_shift = CLK_SPI5_PLL_DIV_CON_SHIFT,
626 .sel_shift = CLK_SPI5_PLL_SEL_SHIFT, },
627};
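
/*
 * Example (a sketch based on the table above): for SCLK_SPI1 the driver uses
 * CLKSEL_CON59, taking bits [14:8] as the 7-bit divider and bit 15 as the
 * CPLL/GPLL parent select; with GPLL selected the rate is GPLL_HZ / (div + 1).
 */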
628
Philipp Tomsichc31ee922017-04-20 22:05:49 +0200629static ulong rk3399_spi_get_clk(struct rk3399_cru *cru, ulong clk_id)
630{
631 const struct spi_clkreg *spiclk = NULL;
632 u32 div, val;
633
634 switch (clk_id) {
635 case SCLK_SPI0 ... SCLK_SPI5:
636 spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
637 break;
638
639 default:
Masahiro Yamada81e10422017-09-16 14:10:41 +0900640 pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
Philipp Tomsichc31ee922017-04-20 22:05:49 +0200641 return -EINVAL;
642 }
643
644 val = readl(&cru->clksel_con[spiclk->reg]);
Philipp Tomsich8a4868f2017-11-22 19:45:04 +0100645 div = bitfield_extract(val, spiclk->div_shift,
646 CLK_SPI_PLL_DIV_CON_WIDTH);
Philipp Tomsichc31ee922017-04-20 22:05:49 +0200647
648 return DIV_TO_RATE(GPLL_HZ, div);
649}
650
651static ulong rk3399_spi_set_clk(struct rk3399_cru *cru, ulong clk_id, uint hz)
652{
653 const struct spi_clkreg *spiclk = NULL;
654 int src_clk_div;
655
Kever Yangf20995b2017-07-27 12:54:02 +0800656 src_clk_div = DIV_ROUND_UP(GPLL_HZ, hz) - 1;
657 assert(src_clk_div < 128);
Philipp Tomsichc31ee922017-04-20 22:05:49 +0200658
659 switch (clk_id) {
660 case SCLK_SPI1 ... SCLK_SPI5:
661 spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
662 break;
663
664 default:
Masahiro Yamada81e10422017-09-16 14:10:41 +0900665 pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
Philipp Tomsichc31ee922017-04-20 22:05:49 +0200666 return -EINVAL;
667 }
668
669 rk_clrsetreg(&cru->clksel_con[spiclk->reg],
670 ((CLK_SPI_PLL_DIV_CON_MASK << spiclk->div_shift) |
671 (CLK_SPI_PLL_SEL_GPLL << spiclk->sel_shift)),
672 ((src_clk_div << spiclk->div_shift) |
673 (CLK_SPI_PLL_SEL_GPLL << spiclk->sel_shift)));
674
Philipp Tomsich30d7c152017-04-20 22:05:50 +0200675 return rk3399_spi_get_clk(cru, clk_id);
Philipp Tomsichc31ee922017-04-20 22:05:49 +0200676}
677
Kever Yangca19eac2016-07-29 10:35:25 +0800678static ulong rk3399_vop_set_clk(struct rk3399_cru *cru, ulong clk_id, u32 hz)
679{
680 struct pll_div vpll_config = {0};
681 int aclk_vop = 198*MHz;
682 void *aclkreg_addr, *dclkreg_addr;
683 u32 div;
684
685 switch (clk_id) {
686 case DCLK_VOP0:
687 aclkreg_addr = &cru->clksel_con[47];
688 dclkreg_addr = &cru->clksel_con[49];
689 break;
690 case DCLK_VOP1:
691 aclkreg_addr = &cru->clksel_con[48];
692 dclkreg_addr = &cru->clksel_con[50];
693 break;
694 default:
695 return -EINVAL;
696 }
697 /* vop aclk source clk: cpll */
698 div = CPLL_HZ / aclk_vop;
699 assert(div - 1 < 32);
700
701 rk_clrsetreg(aclkreg_addr,
702 ACLK_VOP_PLL_SEL_MASK | ACLK_VOP_DIV_CON_MASK,
703 ACLK_VOP_PLL_SEL_CPLL << ACLK_VOP_PLL_SEL_SHIFT |
704 (div - 1) << ACLK_VOP_DIV_CON_SHIFT);
705
	/* The VOP dclk is sourced from the VPLL and equals its rate (i.e. div == 1) */
707 if (pll_para_config(hz, &vpll_config))
708 return -1;
709
710 rkclk_set_pll(&cru->vpll_con[0], &vpll_config);
711
712 rk_clrsetreg(dclkreg_addr,
713 DCLK_VOP_DCLK_SEL_MASK | DCLK_VOP_PLL_SEL_MASK|
714 DCLK_VOP_DIV_CON_MASK,
715 DCLK_VOP_DCLK_SEL_DIVOUT << DCLK_VOP_DCLK_SEL_SHIFT |
716 DCLK_VOP_PLL_SEL_VPLL << DCLK_VOP_PLL_SEL_SHIFT |
717 (1 - 1) << DCLK_VOP_DIV_CON_SHIFT);
718
719 return hz;
720}
721
722static ulong rk3399_mmc_get_clk(struct rk3399_cru *cru, uint clk_id)
723{
724 u32 div, con;
725
726 switch (clk_id) {
Philipp Tomsich78a73142017-04-25 09:52:06 +0200727 case HCLK_SDMMC:
Kever Yangca19eac2016-07-29 10:35:25 +0800728 case SCLK_SDMMC:
729 con = readl(&cru->clksel_con[16]);
		/* the dwmmc controller has an internal divide-by-2 */
731 div = 2;
Kever Yangca19eac2016-07-29 10:35:25 +0800732 break;
733 case SCLK_EMMC:
734 con = readl(&cru->clksel_con[21]);
Kever Yang99b546d2017-07-27 12:54:01 +0800735 div = 1;
Kever Yangca19eac2016-07-29 10:35:25 +0800736 break;
737 default:
738 return -EINVAL;
739 }
Kever Yangca19eac2016-07-29 10:35:25 +0800740
Kever Yang99b546d2017-07-27 12:54:01 +0800741 div *= (con & CLK_EMMC_DIV_CON_MASK) >> CLK_EMMC_DIV_CON_SHIFT;
Kever Yangdc850de2016-08-04 11:44:58 +0800742 if ((con & CLK_EMMC_PLL_MASK) >> CLK_EMMC_PLL_SHIFT
743 == CLK_EMMC_PLL_SEL_24M)
Kever Yang99b546d2017-07-27 12:54:01 +0800744 return DIV_TO_RATE(OSC_HZ, div);
Kever Yangdc850de2016-08-04 11:44:58 +0800745 else
746 return DIV_TO_RATE(GPLL_HZ, div);
Kever Yangca19eac2016-07-29 10:35:25 +0800747}
748
749static ulong rk3399_mmc_set_clk(struct rk3399_cru *cru,
750 ulong clk_id, ulong set_rate)
751{
752 int src_clk_div;
753 int aclk_emmc = 198*MHz;
754
755 switch (clk_id) {
Philipp Tomsich78a73142017-04-25 09:52:06 +0200756 case HCLK_SDMMC:
Kever Yangca19eac2016-07-29 10:35:25 +0800757 case SCLK_SDMMC:
		/* Select clk_sdmmc source from GPLL by default */
		/*
		 * The mmc clock is divided by 2 internally, so ask the CRU
		 * for double the requested rate.
		 */
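		/*
		 * e.g. (a sketch only): a 400 kHz request that falls back to
		 * the 24 MHz OSC parent below gives
		 * src_clk_div = DIV_ROUND_UP(24 MHz / 2, 400 kHz) = 30,
		 * and the register field is programmed with 30 - 1 = 29.
		 */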
760 src_clk_div = DIV_ROUND_UP(GPLL_HZ / 2, set_rate);
Kever Yangca19eac2016-07-29 10:35:25 +0800761
Kever Yangf20995b2017-07-27 12:54:02 +0800762 if (src_clk_div > 128) {
Kever Yangdc850de2016-08-04 11:44:58 +0800763 /* use 24MHz source for 400KHz clock */
Kever Yang99b546d2017-07-27 12:54:01 +0800764 src_clk_div = DIV_ROUND_UP(OSC_HZ / 2, set_rate);
Kever Yangf20995b2017-07-27 12:54:02 +0800765 assert(src_clk_div - 1 < 128);
Kever Yangdc850de2016-08-04 11:44:58 +0800766 rk_clrsetreg(&cru->clksel_con[16],
767 CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
768 CLK_EMMC_PLL_SEL_24M << CLK_EMMC_PLL_SHIFT |
769 (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
770 } else {
771 rk_clrsetreg(&cru->clksel_con[16],
772 CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
773 CLK_EMMC_PLL_SEL_GPLL << CLK_EMMC_PLL_SHIFT |
774 (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
775 }
Kever Yangca19eac2016-07-29 10:35:25 +0800776 break;
777 case SCLK_EMMC:
778 /* Select aclk_emmc source from GPLL */
		src_clk_div = DIV_ROUND_UP(GPLL_HZ, aclk_emmc);
780 assert(src_clk_div - 1 < 32);
Kever Yangca19eac2016-07-29 10:35:25 +0800781
782 rk_clrsetreg(&cru->clksel_con[21],
783 ACLK_EMMC_PLL_SEL_MASK | ACLK_EMMC_DIV_CON_MASK,
784 ACLK_EMMC_PLL_SEL_GPLL << ACLK_EMMC_PLL_SEL_SHIFT |
785 (src_clk_div - 1) << ACLK_EMMC_DIV_CON_SHIFT);
786
787 /* Select clk_emmc source from GPLL too */
Kever Yangf20995b2017-07-27 12:54:02 +0800788 src_clk_div = DIV_ROUND_UP(GPLL_HZ, set_rate);
789 assert(src_clk_div - 1 < 128);
Kever Yangca19eac2016-07-29 10:35:25 +0800790
791 rk_clrsetreg(&cru->clksel_con[22],
792 CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
793 CLK_EMMC_PLL_SEL_GPLL << CLK_EMMC_PLL_SHIFT |
794 (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
795 break;
796 default:
797 return -EINVAL;
798 }
799 return rk3399_mmc_get_clk(cru, clk_id);
800}
801
Philipp Tomsichf4ba6ed2018-01-08 13:11:01 +0100802static ulong rk3399_gmac_set_clk(struct rk3399_cru *cru, ulong rate)
803{
804 ulong ret;
805
806 /*
	 * The RGMII CLK can either be derived from an external "clkin"
	 * or be generated internally by dividing down SCLK_MAC.
809 */
810 if (readl(&cru->clksel_con[19]) & BIT(4)) {
811 /* An external clock will always generate the right rate... */
812 ret = rate;
813 } else {
814 /*
815 * No platform uses an internal clock to date.
816 * Implement this once it becomes necessary and print an error
817 * if someone tries to use it (while it remains unimplemented).
818 */
819 pr_err("%s: internal clock is UNIMPLEMENTED\n", __func__);
820 ret = 0;
821 }
822
823 return ret;
824}
825
Kever Yange1980532017-02-13 17:38:56 +0800826#define PMUSGRF_DDR_RGN_CON16 0xff330040
827static ulong rk3399_ddr_set_clk(struct rk3399_cru *cru,
828 ulong set_rate)
829{
830 struct pll_div dpll_cfg;
831
832 /* IC ECO bug, need to set this register */
833 writel(0xc000c000, PMUSGRF_DDR_RGN_CON16);
834
835 /* clk_ddrc == DPLL = 24MHz / refdiv * fbdiv / postdiv1 / postdiv2 */
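	/* e.g. the 800 MHz entry below: 24 MHz / 1 * 100 / 3 / 1 = 800 MHz */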
836 switch (set_rate) {
837 case 200*MHz:
838 dpll_cfg = (struct pll_div)
839 {.refdiv = 1, .fbdiv = 50, .postdiv1 = 6, .postdiv2 = 1};
840 break;
841 case 300*MHz:
842 dpll_cfg = (struct pll_div)
843 {.refdiv = 2, .fbdiv = 100, .postdiv1 = 4, .postdiv2 = 1};
844 break;
845 case 666*MHz:
846 dpll_cfg = (struct pll_div)
847 {.refdiv = 2, .fbdiv = 111, .postdiv1 = 2, .postdiv2 = 1};
848 break;
849 case 800*MHz:
850 dpll_cfg = (struct pll_div)
851 {.refdiv = 1, .fbdiv = 100, .postdiv1 = 3, .postdiv2 = 1};
852 break;
853 case 933*MHz:
854 dpll_cfg = (struct pll_div)
855 {.refdiv = 1, .fbdiv = 116, .postdiv1 = 3, .postdiv2 = 1};
856 break;
	default:
		pr_err("Unsupported SDRAM frequency %ld\n", set_rate);
		return -EINVAL;
	}
860 rkclk_set_pll(&cru->dpll_con[0], &dpll_cfg);
861
862 return set_rate;
863}
David Wuf91b9b42017-09-20 14:38:58 +0800864
865static ulong rk3399_saradc_get_clk(struct rk3399_cru *cru)
866{
867 u32 div, val;
868
869 val = readl(&cru->clksel_con[26]);
870 div = bitfield_extract(val, CLK_SARADC_DIV_CON_SHIFT,
871 CLK_SARADC_DIV_CON_WIDTH);
872
873 return DIV_TO_RATE(OSC_HZ, div);
874}
875
876static ulong rk3399_saradc_set_clk(struct rk3399_cru *cru, uint hz)
877{
878 int src_clk_div;
879
880 src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
881 assert(src_clk_div < 128);
882
883 rk_clrsetreg(&cru->clksel_con[26],
884 CLK_SARADC_DIV_CON_MASK,
885 src_clk_div << CLK_SARADC_DIV_CON_SHIFT);
886
887 return rk3399_saradc_get_clk(cru);
888}
889
Kever Yangca19eac2016-07-29 10:35:25 +0800890static ulong rk3399_clk_get_rate(struct clk *clk)
891{
892 struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
893 ulong rate = 0;
894
895 switch (clk->id) {
896 case 0 ... 63:
897 return 0;
Philipp Tomsich78a73142017-04-25 09:52:06 +0200898 case HCLK_SDMMC:
Kever Yangca19eac2016-07-29 10:35:25 +0800899 case SCLK_SDMMC:
900 case SCLK_EMMC:
901 rate = rk3399_mmc_get_clk(priv->cru, clk->id);
902 break;
903 case SCLK_I2C1:
904 case SCLK_I2C2:
905 case SCLK_I2C3:
906 case SCLK_I2C5:
907 case SCLK_I2C6:
908 case SCLK_I2C7:
909 rate = rk3399_i2c_get_clk(priv->cru, clk->id);
910 break;
Philipp Tomsichc31ee922017-04-20 22:05:49 +0200911 case SCLK_SPI0...SCLK_SPI5:
912 rate = rk3399_spi_get_clk(priv->cru, clk->id);
913 break;
914 case SCLK_UART0:
915 case SCLK_UART2:
916 return 24000000;
Philipp Tomsich10b594b2017-04-28 18:33:57 +0200917 break;
918 case PCLK_HDMI_CTRL:
919 break;
Kever Yangca19eac2016-07-29 10:35:25 +0800920 case DCLK_VOP0:
921 case DCLK_VOP1:
922 break;
Philipp Tomsichd10b45e2017-04-28 17:11:55 +0200923 case PCLK_EFUSE1024NS:
924 break;
David Wuf91b9b42017-09-20 14:38:58 +0800925 case SCLK_SARADC:
926 rate = rk3399_saradc_get_clk(priv->cru);
927 break;
Simon Glassd27b3172019-01-21 14:53:30 -0700928 case ACLK_VIO:
929 case ACLK_HDCP:
930 case ACLK_GIC_PRE:
931 case PCLK_DDR:
932 break;
Kever Yangca19eac2016-07-29 10:35:25 +0800933 default:
Simon Glassd27b3172019-01-21 14:53:30 -0700934 log_debug("Unknown clock %lu\n", clk->id);
Kever Yangca19eac2016-07-29 10:35:25 +0800935 return -ENOENT;
936 }
937
938 return rate;
939}
940
941static ulong rk3399_clk_set_rate(struct clk *clk, ulong rate)
942{
943 struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
944 ulong ret = 0;
945
946 switch (clk->id) {
947 case 0 ... 63:
948 return 0;
Philipp Tomsich2d20a632018-01-08 14:00:27 +0100949
950 case ACLK_PERIHP:
951 case HCLK_PERIHP:
952 case PCLK_PERIHP:
953 return 0;
954
955 case ACLK_PERILP0:
956 case HCLK_PERILP0:
957 case PCLK_PERILP0:
958 return 0;
959
960 case ACLK_CCI:
961 return 0;
962
963 case HCLK_PERILP1:
964 case PCLK_PERILP1:
965 return 0;
966
Philipp Tomsich78a73142017-04-25 09:52:06 +0200967 case HCLK_SDMMC:
Kever Yangca19eac2016-07-29 10:35:25 +0800968 case SCLK_SDMMC:
969 case SCLK_EMMC:
970 ret = rk3399_mmc_set_clk(priv->cru, clk->id, rate);
971 break;
Philipp Tomsichbfa896c2017-03-24 19:24:25 +0100972 case SCLK_MAC:
Philipp Tomsichf4ba6ed2018-01-08 13:11:01 +0100973 ret = rk3399_gmac_set_clk(priv->cru, rate);
Philipp Tomsichbfa896c2017-03-24 19:24:25 +0100974 break;
Kever Yangca19eac2016-07-29 10:35:25 +0800975 case SCLK_I2C1:
976 case SCLK_I2C2:
977 case SCLK_I2C3:
978 case SCLK_I2C5:
979 case SCLK_I2C6:
980 case SCLK_I2C7:
981 ret = rk3399_i2c_set_clk(priv->cru, clk->id, rate);
982 break;
Philipp Tomsichc31ee922017-04-20 22:05:49 +0200983 case SCLK_SPI0...SCLK_SPI5:
984 ret = rk3399_spi_set_clk(priv->cru, clk->id, rate);
985 break;
Philipp Tomsich10b594b2017-04-28 18:33:57 +0200986 case PCLK_HDMI_CTRL:
987 case PCLK_VIO_GRF:
988 /* the PCLK gates for video are enabled by default */
989 break;
Kever Yangca19eac2016-07-29 10:35:25 +0800990 case DCLK_VOP0:
991 case DCLK_VOP1:
Kever Yange54d26a2016-08-12 17:47:15 +0800992 ret = rk3399_vop_set_clk(priv->cru, clk->id, rate);
Kever Yangca19eac2016-07-29 10:35:25 +0800993 break;
Kever Yange1980532017-02-13 17:38:56 +0800994 case SCLK_DDRCLK:
995 ret = rk3399_ddr_set_clk(priv->cru, rate);
996 break;
Philipp Tomsichd10b45e2017-04-28 17:11:55 +0200997 case PCLK_EFUSE1024NS:
998 break;
David Wuf91b9b42017-09-20 14:38:58 +0800999 case SCLK_SARADC:
1000 ret = rk3399_saradc_set_clk(priv->cru, rate);
1001 break;
Simon Glassd27b3172019-01-21 14:53:30 -07001002 case ACLK_VIO:
1003 case ACLK_HDCP:
1004 case ACLK_GIC_PRE:
1005 case PCLK_DDR:
1006 return 0;
Kever Yangca19eac2016-07-29 10:35:25 +08001007 default:
Simon Glassd27b3172019-01-21 14:53:30 -07001008 log_debug("Unknown clock %lu\n", clk->id);
Kever Yangca19eac2016-07-29 10:35:25 +08001009 return -ENOENT;
1010 }
1011
1012 return ret;
1013}
1014
Philipp Tomsich6dd2fb42018-01-25 15:27:10 +01001015static int __maybe_unused rk3399_gmac_set_parent(struct clk *clk, struct clk *parent)
Philipp Tomsichf4ba6ed2018-01-08 13:11:01 +01001016{
1017 struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
1018 const char *clock_output_name;
1019 int ret;
1020
1021 /*
1022 * If the requested parent is in the same clock-controller and
1023 * the id is SCLK_MAC ("clk_gmac"), switch to the internal clock.
1024 */
1025 if ((parent->dev == clk->dev) && (parent->id == SCLK_MAC)) {
1026 debug("%s: switching RGMII to SCLK_MAC\n", __func__);
1027 rk_clrreg(&priv->cru->clksel_con[19], BIT(4));
1028 return 0;
1029 }
1030
1031 /*
1032 * Otherwise, we need to check the clock-output-names of the
1033 * requested parent to see if the requested id is "clkin_gmac".
1034 */
1035 ret = dev_read_string_index(parent->dev, "clock-output-names",
1036 parent->id, &clock_output_name);
1037 if (ret < 0)
1038 return -ENODATA;
1039
1040 /* If this is "clkin_gmac", switch to the external clock input */
1041 if (!strcmp(clock_output_name, "clkin_gmac")) {
1042 debug("%s: switching RGMII to CLKIN\n", __func__);
1043 rk_setreg(&priv->cru->clksel_con[19], BIT(4));
1044 return 0;
1045 }
1046
1047 return -EINVAL;
1048}
1049
Philipp Tomsich6dd2fb42018-01-25 15:27:10 +01001050static int __maybe_unused rk3399_clk_set_parent(struct clk *clk, struct clk *parent)
Philipp Tomsichf4ba6ed2018-01-08 13:11:01 +01001051{
1052 switch (clk->id) {
1053 case SCLK_RMII_SRC:
1054 return rk3399_gmac_set_parent(clk, parent);
1055 }
1056
1057 debug("%s: unsupported clk %ld\n", __func__, clk->id);
1058 return -ENOENT;
1059}
1060
Philipp Tomsich62332c12017-09-12 17:30:56 +02001061static int rk3399_clk_enable(struct clk *clk)
1062{
1063 switch (clk->id) {
1064 case HCLK_HOST0:
1065 case HCLK_HOST0_ARB:
1066 case HCLK_HOST1:
1067 case HCLK_HOST1_ARB:
1068 return 0;
Philipp Tomsich0b3cd542018-02-16 16:07:24 +01001069
1070 case SCLK_MAC:
1071 case SCLK_MAC_RX:
1072 case SCLK_MAC_TX:
1073 case SCLK_MACREF:
1074 case SCLK_MACREF_OUT:
1075 case ACLK_GMAC:
1076 case PCLK_GMAC:
1077 /* Required to successfully probe the Designware GMAC driver */
1078 return 0;
Philipp Tomsich62332c12017-09-12 17:30:56 +02001079 }
1080
1081 debug("%s: unsupported clk %ld\n", __func__, clk->id);
1082 return -ENOENT;
1083}
1084
Kever Yangca19eac2016-07-29 10:35:25 +08001085static struct clk_ops rk3399_clk_ops = {
1086 .get_rate = rk3399_clk_get_rate,
1087 .set_rate = rk3399_clk_set_rate,
Philipp Tomsich6dd2fb42018-01-25 15:27:10 +01001088#if CONFIG_IS_ENABLED(OF_CONTROL) && !CONFIG_IS_ENABLED(OF_PLATDATA)
Philipp Tomsichf4ba6ed2018-01-08 13:11:01 +01001089 .set_parent = rk3399_clk_set_parent,
Philipp Tomsich6dd2fb42018-01-25 15:27:10 +01001090#endif
Philipp Tomsich62332c12017-09-12 17:30:56 +02001091 .enable = rk3399_clk_enable,
Kever Yangca19eac2016-07-29 10:35:25 +08001092};
1093
Kever Yang05a14b02017-10-12 15:27:29 +08001094#ifdef CONFIG_SPL_BUILD
1095static void rkclk_init(struct rk3399_cru *cru)
1096{
1097 u32 aclk_div;
1098 u32 hclk_div;
1099 u32 pclk_div;
1100
Christoph Muellner25c7ba92018-11-30 20:32:48 +01001101 rk3399_configure_cpu_l(cru, APLL_L_600_MHZ);
1102 rk3399_configure_cpu_b(cru, APLL_B_600_MHZ);
Kever Yang05a14b02017-10-12 15:27:29 +08001103 /*
	 * Some CRU registers are changed by the bootrom; reset them back to
	 * the default values described in the TRM to avoid confusing the
	 * kernel. Consider these three writes a fix for that bootrom bug.
1107 */
1108 rk_clrsetreg(&cru->clksel_con[12], 0xffff, 0x4101);
1109 rk_clrsetreg(&cru->clksel_con[19], 0xffff, 0x033f);
1110 rk_clrsetreg(&cru->clksel_con[56], 0x0003, 0x0003);
1111
1112 /* configure gpll cpll */
1113 rkclk_set_pll(&cru->gpll_con[0], &gpll_init_cfg);
1114 rkclk_set_pll(&cru->cpll_con[0], &cpll_init_cfg);
1115
1116 /* configure perihp aclk, hclk, pclk */
1117 aclk_div = GPLL_HZ / PERIHP_ACLK_HZ - 1;
1118 assert((aclk_div + 1) * PERIHP_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);
1119
1120 hclk_div = PERIHP_ACLK_HZ / PERIHP_HCLK_HZ - 1;
1121 assert((hclk_div + 1) * PERIHP_HCLK_HZ ==
1122 PERIHP_ACLK_HZ && (hclk_div < 0x4));
1123
1124 pclk_div = PERIHP_ACLK_HZ / PERIHP_PCLK_HZ - 1;
1125 assert((pclk_div + 1) * PERIHP_PCLK_HZ ==
1126 PERIHP_ACLK_HZ && (pclk_div < 0x7));
1127
1128 rk_clrsetreg(&cru->clksel_con[14],
1129 PCLK_PERIHP_DIV_CON_MASK | HCLK_PERIHP_DIV_CON_MASK |
1130 ACLK_PERIHP_PLL_SEL_MASK | ACLK_PERIHP_DIV_CON_MASK,
1131 pclk_div << PCLK_PERIHP_DIV_CON_SHIFT |
1132 hclk_div << HCLK_PERIHP_DIV_CON_SHIFT |
1133 ACLK_PERIHP_PLL_SEL_GPLL << ACLK_PERIHP_PLL_SEL_SHIFT |
1134 aclk_div << ACLK_PERIHP_DIV_CON_SHIFT);
1135
1136 /* configure perilp0 aclk, hclk, pclk */
1137 aclk_div = GPLL_HZ / PERILP0_ACLK_HZ - 1;
1138 assert((aclk_div + 1) * PERILP0_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);
1139
1140 hclk_div = PERILP0_ACLK_HZ / PERILP0_HCLK_HZ - 1;
1141 assert((hclk_div + 1) * PERILP0_HCLK_HZ ==
1142 PERILP0_ACLK_HZ && (hclk_div < 0x4));
1143
1144 pclk_div = PERILP0_ACLK_HZ / PERILP0_PCLK_HZ - 1;
1145 assert((pclk_div + 1) * PERILP0_PCLK_HZ ==
1146 PERILP0_ACLK_HZ && (pclk_div < 0x7));
1147
1148 rk_clrsetreg(&cru->clksel_con[23],
1149 PCLK_PERILP0_DIV_CON_MASK | HCLK_PERILP0_DIV_CON_MASK |
1150 ACLK_PERILP0_PLL_SEL_MASK | ACLK_PERILP0_DIV_CON_MASK,
1151 pclk_div << PCLK_PERILP0_DIV_CON_SHIFT |
1152 hclk_div << HCLK_PERILP0_DIV_CON_SHIFT |
1153 ACLK_PERILP0_PLL_SEL_GPLL << ACLK_PERILP0_PLL_SEL_SHIFT |
1154 aclk_div << ACLK_PERILP0_DIV_CON_SHIFT);
1155
1156 /* perilp1 hclk select gpll as source */
1157 hclk_div = GPLL_HZ / PERILP1_HCLK_HZ - 1;
1158 assert((hclk_div + 1) * PERILP1_HCLK_HZ ==
1159 GPLL_HZ && (hclk_div < 0x1f));
1160
	pclk_div = PERILP1_HCLK_HZ / PERILP1_HCLK_HZ - 1;
	assert((pclk_div + 1) * PERILP1_HCLK_HZ ==
	       PERILP1_HCLK_HZ && (pclk_div < 0x7));
1164
1165 rk_clrsetreg(&cru->clksel_con[25],
1166 PCLK_PERILP1_DIV_CON_MASK | HCLK_PERILP1_DIV_CON_MASK |
1167 HCLK_PERILP1_PLL_SEL_MASK,
1168 pclk_div << PCLK_PERILP1_DIV_CON_SHIFT |
1169 hclk_div << HCLK_PERILP1_DIV_CON_SHIFT |
1170 HCLK_PERILP1_PLL_SEL_GPLL << HCLK_PERILP1_PLL_SEL_SHIFT);
1171}
1172#endif
1173
Kever Yangca19eac2016-07-29 10:35:25 +08001174static int rk3399_clk_probe(struct udevice *dev)
1175{
Kever Yange1980532017-02-13 17:38:56 +08001176#ifdef CONFIG_SPL_BUILD
Kever Yangca19eac2016-07-29 10:35:25 +08001177 struct rk3399_clk_priv *priv = dev_get_priv(dev);
1178
Kever Yange1980532017-02-13 17:38:56 +08001179#if CONFIG_IS_ENABLED(OF_PLATDATA)
1180 struct rk3399_clk_plat *plat = dev_get_platdata(dev);
Kever Yangca19eac2016-07-29 10:35:25 +08001181
Simon Glass1b1fe412017-08-29 14:15:50 -06001182 priv->cru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
Kever Yange1980532017-02-13 17:38:56 +08001183#endif
1184 rkclk_init(priv->cru);
1185#endif
Kever Yangca19eac2016-07-29 10:35:25 +08001186 return 0;
1187}
1188
1189static int rk3399_clk_ofdata_to_platdata(struct udevice *dev)
1190{
Kever Yange1980532017-02-13 17:38:56 +08001191#if !CONFIG_IS_ENABLED(OF_PLATDATA)
Kever Yangca19eac2016-07-29 10:35:25 +08001192 struct rk3399_clk_priv *priv = dev_get_priv(dev);
1193
Philipp Tomsich44d76842017-09-12 17:32:24 +02001194 priv->cru = dev_read_addr_ptr(dev);
Kever Yange1980532017-02-13 17:38:56 +08001195#endif
Kever Yangca19eac2016-07-29 10:35:25 +08001196 return 0;
1197}
1198
1199static int rk3399_clk_bind(struct udevice *dev)
1200{
1201 int ret;
Kever Yang4fbb6c22017-11-03 15:16:13 +08001202 struct udevice *sys_child;
1203 struct sysreset_reg *priv;
Kever Yangca19eac2016-07-29 10:35:25 +08001204
1205 /* The reset driver does not have a device node, so bind it here */
Kever Yang4fbb6c22017-11-03 15:16:13 +08001206 ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
1207 &sys_child);
1208 if (ret) {
1209 debug("Warning: No sysreset driver: ret=%d\n", ret);
1210 } else {
1211 priv = malloc(sizeof(struct sysreset_reg));
1212 priv->glb_srst_fst_value = offsetof(struct rk3399_cru,
1213 glb_srst_fst_value);
1214 priv->glb_srst_snd_value = offsetof(struct rk3399_cru,
1215 glb_srst_snd_value);
1216 sys_child->priv = priv;
1217 }
Kever Yangca19eac2016-07-29 10:35:25 +08001218
#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
1220 ret = offsetof(struct rk3399_cru, softrst_con[0]);
1221 ret = rockchip_reset_bind(dev, ret, 21);
1222 if (ret)
1223 debug("Warning: software reset driver bind faile\n");
1224#endif
1225
Kever Yangca19eac2016-07-29 10:35:25 +08001226 return 0;
1227}
1228
1229static const struct udevice_id rk3399_clk_ids[] = {
1230 { .compatible = "rockchip,rk3399-cru" },
1231 { }
1232};
1233
1234U_BOOT_DRIVER(clk_rk3399) = {
Kever Yange1980532017-02-13 17:38:56 +08001235 .name = "rockchip_rk3399_cru",
Kever Yangca19eac2016-07-29 10:35:25 +08001236 .id = UCLASS_CLK,
1237 .of_match = rk3399_clk_ids,
1238 .priv_auto_alloc_size = sizeof(struct rk3399_clk_priv),
1239 .ofdata_to_platdata = rk3399_clk_ofdata_to_platdata,
1240 .ops = &rk3399_clk_ops,
1241 .bind = rk3399_clk_bind,
1242 .probe = rk3399_clk_probe,
Kever Yange1980532017-02-13 17:38:56 +08001243#if CONFIG_IS_ENABLED(OF_PLATDATA)
1244 .platdata_auto_alloc_size = sizeof(struct rk3399_clk_plat),
1245#endif
Kever Yangca19eac2016-07-29 10:35:25 +08001246};
Kever Yange54d26a2016-08-12 17:47:15 +08001247
1248static ulong rk3399_i2c_get_pmuclk(struct rk3399_pmucru *pmucru, ulong clk_id)
1249{
1250 u32 div, con;
1251
1252 switch (clk_id) {
1253 case SCLK_I2C0_PMU:
1254 con = readl(&pmucru->pmucru_clksel[2]);
1255 div = I2C_CLK_DIV_VALUE(con, 0);
1256 break;
1257 case SCLK_I2C4_PMU:
1258 con = readl(&pmucru->pmucru_clksel[3]);
1259 div = I2C_CLK_DIV_VALUE(con, 4);
1260 break;
1261 case SCLK_I2C8_PMU:
1262 con = readl(&pmucru->pmucru_clksel[2]);
1263 div = I2C_CLK_DIV_VALUE(con, 8);
1264 break;
1265 default:
1266 printf("do not support this i2c bus\n");
1267 return -EINVAL;
1268 }
1269
1270 return DIV_TO_RATE(PPLL_HZ, div);
1271}
1272
1273static ulong rk3399_i2c_set_pmuclk(struct rk3399_pmucru *pmucru, ulong clk_id,
1274 uint hz)
1275{
1276 int src_clk_div;
1277
1278 src_clk_div = PPLL_HZ / hz;
1279 assert(src_clk_div - 1 < 127);
1280
1281 switch (clk_id) {
1282 case SCLK_I2C0_PMU:
1283 rk_clrsetreg(&pmucru->pmucru_clksel[2], I2C_PMUCLK_REG_MASK(0),
1284 I2C_PMUCLK_REG_VALUE(0, src_clk_div));
1285 break;
1286 case SCLK_I2C4_PMU:
1287 rk_clrsetreg(&pmucru->pmucru_clksel[3], I2C_PMUCLK_REG_MASK(4),
1288 I2C_PMUCLK_REG_VALUE(4, src_clk_div));
1289 break;
1290 case SCLK_I2C8_PMU:
1291 rk_clrsetreg(&pmucru->pmucru_clksel[2], I2C_PMUCLK_REG_MASK(8),
1292 I2C_PMUCLK_REG_VALUE(8, src_clk_div));
1293 break;
1294 default:
1295 printf("do not support this i2c bus\n");
1296 return -EINVAL;
1297 }
1298
1299 return DIV_TO_RATE(PPLL_HZ, src_clk_div);
1300}
1301
1302static ulong rk3399_pwm_get_clk(struct rk3399_pmucru *pmucru)
1303{
1304 u32 div, con;
1305
	/* The PWM clock rate is the same as pclk_pmu */
1307 con = readl(&pmucru->pmucru_clksel[0]);
1308 div = con & PMU_PCLK_DIV_CON_MASK;
1309
1310 return DIV_TO_RATE(PPLL_HZ, div);
1311}
1312
1313static ulong rk3399_pmuclk_get_rate(struct clk *clk)
1314{
1315 struct rk3399_pmuclk_priv *priv = dev_get_priv(clk->dev);
1316 ulong rate = 0;
1317
1318 switch (clk->id) {
Philipp Tomsich932908c2018-02-23 17:36:41 +01001319 case PLL_PPLL:
1320 return PPLL_HZ;
Kever Yange54d26a2016-08-12 17:47:15 +08001321 case PCLK_RKPWM_PMU:
1322 rate = rk3399_pwm_get_clk(priv->pmucru);
1323 break;
1324 case SCLK_I2C0_PMU:
1325 case SCLK_I2C4_PMU:
1326 case SCLK_I2C8_PMU:
1327 rate = rk3399_i2c_get_pmuclk(priv->pmucru, clk->id);
1328 break;
1329 default:
1330 return -ENOENT;
1331 }
1332
1333 return rate;
1334}
1335
1336static ulong rk3399_pmuclk_set_rate(struct clk *clk, ulong rate)
1337{
1338 struct rk3399_pmuclk_priv *priv = dev_get_priv(clk->dev);
1339 ulong ret = 0;
1340
1341 switch (clk->id) {
Philipp Tomsich932908c2018-02-23 17:36:41 +01001342 case PLL_PPLL:
1343 /*
1344 * This has already been set up and we don't want/need
1345 * to change it here. Accept the request though, as the
1346 * device-tree has this in an 'assigned-clocks' list.
1347 */
1348 return PPLL_HZ;
Kever Yange54d26a2016-08-12 17:47:15 +08001349 case SCLK_I2C0_PMU:
1350 case SCLK_I2C4_PMU:
1351 case SCLK_I2C8_PMU:
1352 ret = rk3399_i2c_set_pmuclk(priv->pmucru, clk->id, rate);
1353 break;
1354 default:
1355 return -ENOENT;
1356 }
1357
1358 return ret;
1359}
1360
1361static struct clk_ops rk3399_pmuclk_ops = {
1362 .get_rate = rk3399_pmuclk_get_rate,
1363 .set_rate = rk3399_pmuclk_set_rate,
1364};
1365
Kever Yange1980532017-02-13 17:38:56 +08001366#ifndef CONFIG_SPL_BUILD
Kever Yange54d26a2016-08-12 17:47:15 +08001367static void pmuclk_init(struct rk3399_pmucru *pmucru)
1368{
1369 u32 pclk_div;
1370
1371 /* configure pmu pll(ppll) */
1372 rkclk_set_pll(&pmucru->ppll_con[0], &ppll_init_cfg);
1373
1374 /* configure pmu pclk */
1375 pclk_div = PPLL_HZ / PMU_PCLK_HZ - 1;
Kever Yange54d26a2016-08-12 17:47:15 +08001376 rk_clrsetreg(&pmucru->pmucru_clksel[0],
1377 PMU_PCLK_DIV_CON_MASK,
1378 pclk_div << PMU_PCLK_DIV_CON_SHIFT);
1379}
Kever Yange1980532017-02-13 17:38:56 +08001380#endif
Kever Yange54d26a2016-08-12 17:47:15 +08001381
1382static int rk3399_pmuclk_probe(struct udevice *dev)
1383{
Philipp Tomsichcf0a4ba2017-03-24 19:24:24 +01001384#if CONFIG_IS_ENABLED(OF_PLATDATA) || !defined(CONFIG_SPL_BUILD)
Kever Yange54d26a2016-08-12 17:47:15 +08001385 struct rk3399_pmuclk_priv *priv = dev_get_priv(dev);
Philipp Tomsichcf0a4ba2017-03-24 19:24:24 +01001386#endif
Kever Yange54d26a2016-08-12 17:47:15 +08001387
Kever Yange1980532017-02-13 17:38:56 +08001388#if CONFIG_IS_ENABLED(OF_PLATDATA)
1389 struct rk3399_pmuclk_plat *plat = dev_get_platdata(dev);
Kever Yange54d26a2016-08-12 17:47:15 +08001390
Simon Glass1b1fe412017-08-29 14:15:50 -06001391 priv->pmucru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
Kever Yange1980532017-02-13 17:38:56 +08001392#endif
1393
1394#ifndef CONFIG_SPL_BUILD
1395 pmuclk_init(priv->pmucru);
1396#endif
Kever Yange54d26a2016-08-12 17:47:15 +08001397 return 0;
1398}
1399
1400static int rk3399_pmuclk_ofdata_to_platdata(struct udevice *dev)
1401{
Kever Yange1980532017-02-13 17:38:56 +08001402#if !CONFIG_IS_ENABLED(OF_PLATDATA)
Kever Yange54d26a2016-08-12 17:47:15 +08001403 struct rk3399_pmuclk_priv *priv = dev_get_priv(dev);
1404
Philipp Tomsich44d76842017-09-12 17:32:24 +02001405 priv->pmucru = dev_read_addr_ptr(dev);
Kever Yange1980532017-02-13 17:38:56 +08001406#endif
Kever Yange54d26a2016-08-12 17:47:15 +08001407 return 0;
1408}
1409
Elaine Zhang432976f2017-12-19 18:22:38 +08001410static int rk3399_pmuclk_bind(struct udevice *dev)
1411{
#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
1413 int ret;
1414
1415 ret = offsetof(struct rk3399_pmucru, pmucru_softrst_con[0]);
1416 ret = rockchip_reset_bind(dev, ret, 2);
1417 if (ret)
1418 debug("Warning: software reset driver bind faile\n");
1419#endif
1420 return 0;
1421}
1422
Kever Yange54d26a2016-08-12 17:47:15 +08001423static const struct udevice_id rk3399_pmuclk_ids[] = {
1424 { .compatible = "rockchip,rk3399-pmucru" },
1425 { }
1426};
1427
Simon Glassd1dfea72016-10-01 20:04:51 -06001428U_BOOT_DRIVER(rockchip_rk3399_pmuclk) = {
Kever Yange1980532017-02-13 17:38:56 +08001429 .name = "rockchip_rk3399_pmucru",
Kever Yange54d26a2016-08-12 17:47:15 +08001430 .id = UCLASS_CLK,
1431 .of_match = rk3399_pmuclk_ids,
1432 .priv_auto_alloc_size = sizeof(struct rk3399_pmuclk_priv),
1433 .ofdata_to_platdata = rk3399_pmuclk_ofdata_to_platdata,
1434 .ops = &rk3399_pmuclk_ops,
1435 .probe = rk3399_pmuclk_probe,
Elaine Zhang432976f2017-12-19 18:22:38 +08001436 .bind = rk3399_pmuclk_bind,
Kever Yange1980532017-02-13 17:38:56 +08001437#if CONFIG_IS_ENABLED(OF_PLATDATA)
1438 .platdata_auto_alloc_size = sizeof(struct rk3399_pmuclk_plat),
1439#endif
Kever Yange54d26a2016-08-12 17:47:15 +08001440};