// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2015 Google, Inc
 * (C) 2017 Theobroma Systems Design und Consulting GmbH
 */

#include <common.h>
#include <clk-uclass.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <malloc.h>
#include <mapmem.h>
#include <syscon.h>
#include <bitfield.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru.h>
#include <asm/arch-rockchip/hardware.h>
#include <dm/lists.h>
#include <dt-bindings/clock/rk3399-cru.h>

#if CONFIG_IS_ENABLED(OF_PLATDATA)
struct rk3399_clk_plat {
	struct dtd_rockchip_rk3399_cru dtd;
};

struct rk3399_pmuclk_plat {
	struct dtd_rockchip_rk3399_pmucru dtd;
};
#endif

struct pll_div {
	u32 refdiv;
	u32 fbdiv;
	u32 postdiv1;
	u32 postdiv2;
	u32 frac;
};

#define RATE_TO_DIV(input_rate, output_rate) \
	((input_rate) / (output_rate) - 1)
#define DIV_TO_RATE(input_rate, div)	((input_rate) / ((div) + 1))

#define PLL_DIVISORS(hz, _refdiv, _postdiv1, _postdiv2) {\
	.refdiv = _refdiv,\
	.fbdiv = (u32)((u64)hz * _refdiv * _postdiv1 * _postdiv2 / OSC_HZ),\
	.postdiv1 = _postdiv1, .postdiv2 = _postdiv2};
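
/*
 * Illustrative check of the macro above (the numbers are arbitrary and only
 * assume the 24 MHz OSC_HZ input described in the PLL comment further down):
 * PLL_DIVISORS(600 * MHz, 2, 2, 1) computes fbdiv = 600 * 2 * 2 * 1 / 24 = 100,
 * which reproduces 24 MHz / 2 * 100 / 2 / 1 = 600 MHz at the PLL output.
 */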

#if defined(CONFIG_SPL_BUILD)
static const struct pll_div gpll_init_cfg = PLL_DIVISORS(GPLL_HZ, 2, 2, 1);
static const struct pll_div cpll_init_cfg = PLL_DIVISORS(CPLL_HZ, 1, 2, 2);
#else
static const struct pll_div ppll_init_cfg = PLL_DIVISORS(PPLL_HZ, 2, 2, 1);
#endif

static const struct pll_div apll_l_1600_cfg = PLL_DIVISORS(1600 * MHz, 3, 1, 1);
static const struct pll_div apll_l_600_cfg = PLL_DIVISORS(600 * MHz, 1, 2, 1);

static const struct pll_div *apll_l_cfgs[] = {
	[APLL_L_1600_MHZ] = &apll_l_1600_cfg,
	[APLL_L_600_MHZ] = &apll_l_600_cfg,
};

static const struct pll_div apll_b_600_cfg = PLL_DIVISORS(600 * MHz, 1, 2, 1);
static const struct pll_div *apll_b_cfgs[] = {
	[APLL_B_600_MHZ] = &apll_b_600_cfg,
};

enum {
	/* PLL_CON0 */
	PLL_FBDIV_MASK = 0xfff,
	PLL_FBDIV_SHIFT = 0,

	/* PLL_CON1 */
	PLL_POSTDIV2_SHIFT = 12,
	PLL_POSTDIV2_MASK = 0x7 << PLL_POSTDIV2_SHIFT,
	PLL_POSTDIV1_SHIFT = 8,
	PLL_POSTDIV1_MASK = 0x7 << PLL_POSTDIV1_SHIFT,
	PLL_REFDIV_MASK = 0x3f,
	PLL_REFDIV_SHIFT = 0,

	/* PLL_CON2 */
	PLL_LOCK_STATUS_SHIFT = 31,
	PLL_LOCK_STATUS_MASK = 1 << PLL_LOCK_STATUS_SHIFT,
	PLL_FRACDIV_MASK = 0xffffff,
	PLL_FRACDIV_SHIFT = 0,

	/* PLL_CON3 */
	PLL_MODE_SHIFT = 8,
	PLL_MODE_MASK = 3 << PLL_MODE_SHIFT,
	PLL_MODE_SLOW = 0,
	PLL_MODE_NORM,
	PLL_MODE_DEEP,
	PLL_DSMPD_SHIFT = 3,
	PLL_DSMPD_MASK = 1 << PLL_DSMPD_SHIFT,
	PLL_INTEGER_MODE = 1,

	/* PMUCRU_CLKSEL_CON0 */
	PMU_PCLK_DIV_CON_MASK = 0x1f,
	PMU_PCLK_DIV_CON_SHIFT = 0,

	/* PMUCRU_CLKSEL_CON1 */
	SPI3_PLL_SEL_SHIFT = 7,
	SPI3_PLL_SEL_MASK = 1 << SPI3_PLL_SEL_SHIFT,
	SPI3_PLL_SEL_24M = 0,
	SPI3_PLL_SEL_PPLL = 1,
	SPI3_DIV_CON_SHIFT = 0x0,
	SPI3_DIV_CON_MASK = 0x7f,

	/* PMUCRU_CLKSEL_CON2 */
	I2C_DIV_CON_MASK = 0x7f,
	CLK_I2C8_DIV_CON_SHIFT = 8,
	CLK_I2C0_DIV_CON_SHIFT = 0,

	/* PMUCRU_CLKSEL_CON3 */
	CLK_I2C4_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON0 */
	ACLKM_CORE_L_DIV_CON_SHIFT = 8,
	ACLKM_CORE_L_DIV_CON_MASK = 0x1f << ACLKM_CORE_L_DIV_CON_SHIFT,
	CLK_CORE_L_PLL_SEL_SHIFT = 6,
	CLK_CORE_L_PLL_SEL_MASK = 3 << CLK_CORE_L_PLL_SEL_SHIFT,
	CLK_CORE_L_PLL_SEL_ALPLL = 0x0,
	CLK_CORE_L_PLL_SEL_ABPLL = 0x1,
	CLK_CORE_L_PLL_SEL_DPLL = 0x10,
	CLK_CORE_L_PLL_SEL_GPLL = 0x11,
	CLK_CORE_L_DIV_MASK = 0x1f,
	CLK_CORE_L_DIV_SHIFT = 0,

	/* CLKSEL_CON1 */
	PCLK_DBG_L_DIV_SHIFT = 0x8,
	PCLK_DBG_L_DIV_MASK = 0x1f << PCLK_DBG_L_DIV_SHIFT,
	ATCLK_CORE_L_DIV_SHIFT = 0,
	ATCLK_CORE_L_DIV_MASK = 0x1f << ATCLK_CORE_L_DIV_SHIFT,

	/* CLKSEL_CON2 */
	ACLKM_CORE_B_DIV_CON_SHIFT = 8,
	ACLKM_CORE_B_DIV_CON_MASK = 0x1f << ACLKM_CORE_B_DIV_CON_SHIFT,
	CLK_CORE_B_PLL_SEL_SHIFT = 6,
	CLK_CORE_B_PLL_SEL_MASK = 3 << CLK_CORE_B_PLL_SEL_SHIFT,
	CLK_CORE_B_PLL_SEL_ALPLL = 0x0,
	CLK_CORE_B_PLL_SEL_ABPLL = 0x1,
	CLK_CORE_B_PLL_SEL_DPLL = 0x10,
	CLK_CORE_B_PLL_SEL_GPLL = 0x11,
	CLK_CORE_B_DIV_MASK = 0x1f,
	CLK_CORE_B_DIV_SHIFT = 0,

	/* CLKSEL_CON3 */
	PCLK_DBG_B_DIV_SHIFT = 0x8,
	PCLK_DBG_B_DIV_MASK = 0x1f << PCLK_DBG_B_DIV_SHIFT,
	ATCLK_CORE_B_DIV_SHIFT = 0,
	ATCLK_CORE_B_DIV_MASK = 0x1f << ATCLK_CORE_B_DIV_SHIFT,

	/* CLKSEL_CON14 */
	PCLK_PERIHP_DIV_CON_SHIFT = 12,
	PCLK_PERIHP_DIV_CON_MASK = 0x7 << PCLK_PERIHP_DIV_CON_SHIFT,
	HCLK_PERIHP_DIV_CON_SHIFT = 8,
	HCLK_PERIHP_DIV_CON_MASK = 3 << HCLK_PERIHP_DIV_CON_SHIFT,
	ACLK_PERIHP_PLL_SEL_SHIFT = 7,
	ACLK_PERIHP_PLL_SEL_MASK = 1 << ACLK_PERIHP_PLL_SEL_SHIFT,
	ACLK_PERIHP_PLL_SEL_CPLL = 0,
	ACLK_PERIHP_PLL_SEL_GPLL = 1,
	ACLK_PERIHP_DIV_CON_SHIFT = 0,
	ACLK_PERIHP_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON21 */
	ACLK_EMMC_PLL_SEL_SHIFT = 7,
	ACLK_EMMC_PLL_SEL_MASK = 0x1 << ACLK_EMMC_PLL_SEL_SHIFT,
	ACLK_EMMC_PLL_SEL_GPLL = 0x1,
	ACLK_EMMC_DIV_CON_SHIFT = 0,
	ACLK_EMMC_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON22 */
	CLK_EMMC_PLL_SHIFT = 8,
	CLK_EMMC_PLL_MASK = 0x7 << CLK_EMMC_PLL_SHIFT,
	CLK_EMMC_PLL_SEL_GPLL = 0x1,
	CLK_EMMC_PLL_SEL_24M = 0x5,
	CLK_EMMC_DIV_CON_SHIFT = 0,
	CLK_EMMC_DIV_CON_MASK = 0x7f << CLK_EMMC_DIV_CON_SHIFT,

	/* CLKSEL_CON23 */
	PCLK_PERILP0_DIV_CON_SHIFT = 12,
	PCLK_PERILP0_DIV_CON_MASK = 0x7 << PCLK_PERILP0_DIV_CON_SHIFT,
	HCLK_PERILP0_DIV_CON_SHIFT = 8,
	HCLK_PERILP0_DIV_CON_MASK = 3 << HCLK_PERILP0_DIV_CON_SHIFT,
	ACLK_PERILP0_PLL_SEL_SHIFT = 7,
	ACLK_PERILP0_PLL_SEL_MASK = 1 << ACLK_PERILP0_PLL_SEL_SHIFT,
	ACLK_PERILP0_PLL_SEL_CPLL = 0,
	ACLK_PERILP0_PLL_SEL_GPLL = 1,
	ACLK_PERILP0_DIV_CON_SHIFT = 0,
	ACLK_PERILP0_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON25 */
	PCLK_PERILP1_DIV_CON_SHIFT = 8,
	PCLK_PERILP1_DIV_CON_MASK = 0x7 << PCLK_PERILP1_DIV_CON_SHIFT,
	HCLK_PERILP1_PLL_SEL_SHIFT = 7,
	HCLK_PERILP1_PLL_SEL_MASK = 1 << HCLK_PERILP1_PLL_SEL_SHIFT,
	HCLK_PERILP1_PLL_SEL_CPLL = 0,
	HCLK_PERILP1_PLL_SEL_GPLL = 1,
	HCLK_PERILP1_DIV_CON_SHIFT = 0,
	HCLK_PERILP1_DIV_CON_MASK = 0x1f,

	/* CLKSEL_CON26 */
	CLK_SARADC_DIV_CON_SHIFT = 8,
	CLK_SARADC_DIV_CON_MASK = GENMASK(15, 8),
	CLK_SARADC_DIV_CON_WIDTH = 8,

	/* CLKSEL_CON27 */
	CLK_TSADC_SEL_X24M = 0x0,
	CLK_TSADC_SEL_SHIFT = 15,
	CLK_TSADC_SEL_MASK = 1 << CLK_TSADC_SEL_SHIFT,
	CLK_TSADC_DIV_CON_SHIFT = 0,
	CLK_TSADC_DIV_CON_MASK = 0x3ff,

	/* CLKSEL_CON47 & CLKSEL_CON48 */
	ACLK_VOP_PLL_SEL_SHIFT = 6,
	ACLK_VOP_PLL_SEL_MASK = 0x3 << ACLK_VOP_PLL_SEL_SHIFT,
	ACLK_VOP_PLL_SEL_CPLL = 0x1,
	ACLK_VOP_DIV_CON_SHIFT = 0,
	ACLK_VOP_DIV_CON_MASK = 0x1f << ACLK_VOP_DIV_CON_SHIFT,

	/* CLKSEL_CON49 & CLKSEL_CON50 */
	DCLK_VOP_DCLK_SEL_SHIFT = 11,
	DCLK_VOP_DCLK_SEL_MASK = 1 << DCLK_VOP_DCLK_SEL_SHIFT,
	DCLK_VOP_DCLK_SEL_DIVOUT = 0,
	DCLK_VOP_PLL_SEL_SHIFT = 8,
	DCLK_VOP_PLL_SEL_MASK = 3 << DCLK_VOP_PLL_SEL_SHIFT,
	DCLK_VOP_PLL_SEL_VPLL = 0,
	DCLK_VOP_DIV_CON_MASK = 0xff,
	DCLK_VOP_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON58 */
	CLK_SPI_PLL_SEL_WIDTH = 1,
	CLK_SPI_PLL_SEL_MASK = ((1 << CLK_SPI_PLL_SEL_WIDTH) - 1),
	CLK_SPI_PLL_SEL_CPLL = 0,
	CLK_SPI_PLL_SEL_GPLL = 1,
	CLK_SPI_PLL_DIV_CON_WIDTH = 7,
	CLK_SPI_PLL_DIV_CON_MASK = ((1 << CLK_SPI_PLL_DIV_CON_WIDTH) - 1),

	CLK_SPI5_PLL_DIV_CON_SHIFT = 8,
	CLK_SPI5_PLL_SEL_SHIFT = 15,

	/* CLKSEL_CON59 */
	CLK_SPI1_PLL_SEL_SHIFT = 15,
	CLK_SPI1_PLL_DIV_CON_SHIFT = 8,
	CLK_SPI0_PLL_SEL_SHIFT = 7,
	CLK_SPI0_PLL_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON60 */
	CLK_SPI4_PLL_SEL_SHIFT = 15,
	CLK_SPI4_PLL_DIV_CON_SHIFT = 8,
	CLK_SPI2_PLL_SEL_SHIFT = 7,
	CLK_SPI2_PLL_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON61 */
	CLK_I2C_PLL_SEL_MASK = 1,
	CLK_I2C_PLL_SEL_CPLL = 0,
	CLK_I2C_PLL_SEL_GPLL = 1,
	CLK_I2C5_PLL_SEL_SHIFT = 15,
	CLK_I2C5_DIV_CON_SHIFT = 8,
	CLK_I2C1_PLL_SEL_SHIFT = 7,
	CLK_I2C1_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON62 */
	CLK_I2C6_PLL_SEL_SHIFT = 15,
	CLK_I2C6_DIV_CON_SHIFT = 8,
	CLK_I2C2_PLL_SEL_SHIFT = 7,
	CLK_I2C2_DIV_CON_SHIFT = 0,

	/* CLKSEL_CON63 */
	CLK_I2C7_PLL_SEL_SHIFT = 15,
	CLK_I2C7_DIV_CON_SHIFT = 8,
	CLK_I2C3_PLL_SEL_SHIFT = 7,
	CLK_I2C3_DIV_CON_SHIFT = 0,

	/* CRU_SOFTRST_CON4 */
	RESETN_DDR0_REQ_SHIFT = 8,
	RESETN_DDR0_REQ_MASK = 1 << RESETN_DDR0_REQ_SHIFT,
	RESETN_DDRPHY0_REQ_SHIFT = 9,
	RESETN_DDRPHY0_REQ_MASK = 1 << RESETN_DDRPHY0_REQ_SHIFT,
	RESETN_DDR1_REQ_SHIFT = 12,
	RESETN_DDR1_REQ_MASK = 1 << RESETN_DDR1_REQ_SHIFT,
	RESETN_DDRPHY1_REQ_SHIFT = 13,
	RESETN_DDRPHY1_REQ_MASK = 1 << RESETN_DDRPHY1_REQ_SHIFT,
};

#define VCO_MAX_KHZ	(3200 * (MHz / KHz))
#define VCO_MIN_KHZ	(800 * (MHz / KHz))
#define OUTPUT_MAX_KHZ	(3200 * (MHz / KHz))
#define OUTPUT_MIN_KHZ	(16 * (MHz / KHz))

/*
 * The divider restrictions of the PLLs in integer mode; these are defined
 * in CRU_*PLL_CON0 or PMUCRU_*PLL_CON0.
 */
#define PLL_DIV_MIN	16
#define PLL_DIV_MAX	3200

/*
 * How to calculate the PLL (from TRM V0.3 Part 1 Page 63):
 * Formulas also embedded within the Fractional PLL Verilog model:
 * If DSMPD = 1 (DSM is disabled, "integer mode"):
 *	FOUTVCO = FREF / REFDIV * FBDIV
 *	FOUTPOSTDIV = FOUTVCO / POSTDIV1 / POSTDIV2
 * Where:
 *	FOUTVCO = Fractional PLL non-divided output frequency
 *	FOUTPOSTDIV = Fractional PLL divided output frequency
 *		(output of second post divider)
 *	FREF = Fractional PLL input reference frequency (the OSC_HZ 24 MHz input)
 *	REFDIV = Fractional PLL input reference clock divider
 *	FBDIV = Integer value programmed into the feedback divider
 *
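 * Worked example (illustrative numbers only, not tied to any particular PLL
 * configuration in this file): with FREF = 24 MHz, REFDIV = 2, FBDIV = 100,
 * POSTDIV1 = 2 and POSTDIV2 = 1:
 *	FOUTVCO = 24 MHz / 2 * 100 = 1200 MHz
 *	FOUTPOSTDIV = 1200 MHz / 2 / 1 = 600 MHz
 *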
 */
static void rkclk_set_pll(u32 *pll_con, const struct pll_div *div)
{
	/* All 8 PLLs have same VCO and output frequency range restrictions. */
	u32 vco_khz = OSC_HZ / 1000 * div->fbdiv / div->refdiv;
	u32 output_khz = vco_khz / div->postdiv1 / div->postdiv2;

	debug("PLL at %p: fbdiv=%d, refdiv=%d, postdiv1=%d, "
	      "postdiv2=%d, vco=%u khz, output=%u khz\n",
	      pll_con, div->fbdiv, div->refdiv, div->postdiv1,
	      div->postdiv2, vco_khz, output_khz);
	assert(vco_khz >= VCO_MIN_KHZ && vco_khz <= VCO_MAX_KHZ &&
	       output_khz >= OUTPUT_MIN_KHZ && output_khz <= OUTPUT_MAX_KHZ &&
	       div->fbdiv >= PLL_DIV_MIN && div->fbdiv <= PLL_DIV_MAX);

	/*
	 * At power-on, or when changing the PLL setting, the PLL must first
	 * be forced into slow mode to ensure a stable output clock.
	 */
	rk_clrsetreg(&pll_con[3], PLL_MODE_MASK,
		     PLL_MODE_SLOW << PLL_MODE_SHIFT);

	/* use integer mode */
	rk_clrsetreg(&pll_con[3], PLL_DSMPD_MASK,
		     PLL_INTEGER_MODE << PLL_DSMPD_SHIFT);

	rk_clrsetreg(&pll_con[0], PLL_FBDIV_MASK,
		     div->fbdiv << PLL_FBDIV_SHIFT);
	rk_clrsetreg(&pll_con[1],
		     PLL_POSTDIV2_MASK | PLL_POSTDIV1_MASK | PLL_REFDIV_MASK,
		     (div->postdiv2 << PLL_POSTDIV2_SHIFT) |
		     (div->postdiv1 << PLL_POSTDIV1_SHIFT) |
		     (div->refdiv << PLL_REFDIV_SHIFT));

	/* wait for the PLL to lock */
	while (!(readl(&pll_con[2]) & (1 << PLL_LOCK_STATUS_SHIFT)))
		udelay(1);

	/* switch the PLL back to normal mode */
	rk_clrsetreg(&pll_con[3], PLL_MODE_MASK,
		     PLL_MODE_NORM << PLL_MODE_SHIFT);
}

static int pll_para_config(u32 freq_hz, struct pll_div *div)
{
	u32 ref_khz = OSC_HZ / KHz, refdiv, fbdiv = 0;
	u32 postdiv1, postdiv2 = 1;
	u32 fref_khz;
	u32 diff_khz, best_diff_khz;
	const u32 max_refdiv = 63, max_fbdiv = 3200, min_fbdiv = 16;
	const u32 max_postdiv1 = 7, max_postdiv2 = 7;
	u32 vco_khz;
	u32 freq_khz = freq_hz / KHz;

	if (!freq_hz) {
		printf("%s: the frequency can't be 0 Hz\n", __func__);
		return -1;
	}

	postdiv1 = DIV_ROUND_UP(VCO_MIN_KHZ, freq_khz);
	if (postdiv1 > max_postdiv1) {
		postdiv2 = DIV_ROUND_UP(postdiv1, max_postdiv1);
		postdiv1 = DIV_ROUND_UP(postdiv1, postdiv2);
	}

	vco_khz = freq_khz * postdiv1 * postdiv2;

	if (vco_khz < VCO_MIN_KHZ || vco_khz > VCO_MAX_KHZ ||
	    postdiv2 > max_postdiv2) {
		printf("%s: cannot find a supported VCO for frequency %u Hz\n",
		       __func__, freq_hz);
		return -1;
	}

	div->postdiv1 = postdiv1;
	div->postdiv2 = postdiv2;

	best_diff_khz = vco_khz;
	for (refdiv = 1; refdiv < max_refdiv && best_diff_khz; refdiv++) {
		fref_khz = ref_khz / refdiv;

		fbdiv = vco_khz / fref_khz;
		if (fbdiv >= max_fbdiv || fbdiv <= min_fbdiv)
			continue;
		diff_khz = vco_khz - fbdiv * fref_khz;
		if (fbdiv + 1 < max_fbdiv && diff_khz > fref_khz / 2) {
			fbdiv++;
			diff_khz = fref_khz - diff_khz;
		}

		if (diff_khz >= best_diff_khz)
			continue;

		best_diff_khz = diff_khz;
		div->refdiv = refdiv;
		div->fbdiv = fbdiv;
	}

	if (best_diff_khz > 4 * (MHz / KHz)) {
		printf("%s: failed to match output frequency %u Hz; "
		       "the difference of %u Hz exceeds 4 MHz\n",
		       __func__, freq_hz, best_diff_khz * KHz);
		return -1;
	}
	return 0;
}

void rk3399_configure_cpu_l(struct rockchip_cru *cru,
			    enum apll_l_frequencies apll_l_freq)
{
	u32 aclkm_div;
	u32 pclk_dbg_div;
	u32 atclk_div;

	/* Setup cluster L */
	rkclk_set_pll(&cru->apll_l_con[0], apll_l_cfgs[apll_l_freq]);

	aclkm_div = LPLL_HZ / ACLKM_CORE_L_HZ - 1;
	assert((aclkm_div + 1) * ACLKM_CORE_L_HZ == LPLL_HZ &&
	       aclkm_div < 0x1f);

	pclk_dbg_div = LPLL_HZ / PCLK_DBG_L_HZ - 1;
	assert((pclk_dbg_div + 1) * PCLK_DBG_L_HZ == LPLL_HZ &&
	       pclk_dbg_div < 0x1f);

	atclk_div = LPLL_HZ / ATCLK_CORE_L_HZ - 1;
	assert((atclk_div + 1) * ATCLK_CORE_L_HZ == LPLL_HZ &&
	       atclk_div < 0x1f);

	rk_clrsetreg(&cru->clksel_con[0],
		     ACLKM_CORE_L_DIV_CON_MASK | CLK_CORE_L_PLL_SEL_MASK |
		     CLK_CORE_L_DIV_MASK,
		     aclkm_div << ACLKM_CORE_L_DIV_CON_SHIFT |
		     CLK_CORE_L_PLL_SEL_ALPLL << CLK_CORE_L_PLL_SEL_SHIFT |
		     0 << CLK_CORE_L_DIV_SHIFT);

	rk_clrsetreg(&cru->clksel_con[1],
		     PCLK_DBG_L_DIV_MASK | ATCLK_CORE_L_DIV_MASK,
		     pclk_dbg_div << PCLK_DBG_L_DIV_SHIFT |
		     atclk_div << ATCLK_CORE_L_DIV_SHIFT);
}

void rk3399_configure_cpu_b(struct rockchip_cru *cru,
			    enum apll_b_frequencies apll_b_freq)
{
	u32 aclkm_div;
	u32 pclk_dbg_div;
	u32 atclk_div;

	/* Setup cluster B */
	rkclk_set_pll(&cru->apll_b_con[0], apll_b_cfgs[apll_b_freq]);

	aclkm_div = BPLL_HZ / ACLKM_CORE_B_HZ - 1;
	assert((aclkm_div + 1) * ACLKM_CORE_B_HZ == BPLL_HZ &&
	       aclkm_div < 0x1f);

	pclk_dbg_div = BPLL_HZ / PCLK_DBG_B_HZ - 1;
	assert((pclk_dbg_div + 1) * PCLK_DBG_B_HZ == BPLL_HZ &&
	       pclk_dbg_div < 0x1f);

	atclk_div = BPLL_HZ / ATCLK_CORE_B_HZ - 1;
	assert((atclk_div + 1) * ATCLK_CORE_B_HZ == BPLL_HZ &&
	       atclk_div < 0x1f);

	rk_clrsetreg(&cru->clksel_con[2],
		     ACLKM_CORE_B_DIV_CON_MASK | CLK_CORE_B_PLL_SEL_MASK |
		     CLK_CORE_B_DIV_MASK,
		     aclkm_div << ACLKM_CORE_B_DIV_CON_SHIFT |
		     CLK_CORE_B_PLL_SEL_ABPLL << CLK_CORE_B_PLL_SEL_SHIFT |
		     0 << CLK_CORE_B_DIV_SHIFT);

	rk_clrsetreg(&cru->clksel_con[3],
		     PCLK_DBG_B_DIV_MASK | ATCLK_CORE_B_DIV_MASK,
		     pclk_dbg_div << PCLK_DBG_B_DIV_SHIFT |
		     atclk_div << ATCLK_CORE_B_DIV_SHIFT);
}

#define I2C_CLK_REG_MASK(bus) \
	(I2C_DIV_CON_MASK << CLK_I2C ##bus## _DIV_CON_SHIFT | \
	 CLK_I2C_PLL_SEL_MASK << CLK_I2C ##bus## _PLL_SEL_SHIFT)

#define I2C_CLK_REG_VALUE(bus, clk_div) \
	((clk_div - 1) << CLK_I2C ##bus## _DIV_CON_SHIFT | \
	 CLK_I2C_PLL_SEL_GPLL << CLK_I2C ##bus## _PLL_SEL_SHIFT)

#define I2C_CLK_DIV_VALUE(con, bus) \
	((con >> CLK_I2C ##bus## _DIV_CON_SHIFT) & I2C_DIV_CON_MASK)

#define I2C_PMUCLK_REG_MASK(bus) \
	(I2C_DIV_CON_MASK << CLK_I2C ##bus## _DIV_CON_SHIFT)

#define I2C_PMUCLK_REG_VALUE(bus, clk_div) \
	((clk_div - 1) << CLK_I2C ##bus## _DIV_CON_SHIFT)
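
/*
 * Purely illustrative expansion of the macros above (the divider value is
 * arbitrary): I2C_CLK_REG_VALUE(5, 3) evaluates to
 * (2 << CLK_I2C5_DIV_CON_SHIFT) | (CLK_I2C_PLL_SEL_GPLL << CLK_I2C5_PLL_SEL_SHIFT),
 * i.e. it selects GPLL and programs a divider field of 2, which is a
 * divide-by-3 per DIV_TO_RATE().
 */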

static ulong rk3399_i2c_get_clk(struct rockchip_cru *cru, ulong clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case SCLK_I2C1:
		con = readl(&cru->clksel_con[61]);
		div = I2C_CLK_DIV_VALUE(con, 1);
		break;
	case SCLK_I2C2:
		con = readl(&cru->clksel_con[62]);
		div = I2C_CLK_DIV_VALUE(con, 2);
		break;
	case SCLK_I2C3:
		con = readl(&cru->clksel_con[63]);
		div = I2C_CLK_DIV_VALUE(con, 3);
		break;
	case SCLK_I2C5:
		con = readl(&cru->clksel_con[61]);
		div = I2C_CLK_DIV_VALUE(con, 5);
		break;
	case SCLK_I2C6:
		con = readl(&cru->clksel_con[62]);
		div = I2C_CLK_DIV_VALUE(con, 6);
		break;
	case SCLK_I2C7:
		con = readl(&cru->clksel_con[63]);
		div = I2C_CLK_DIV_VALUE(con, 7);
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3399_i2c_set_clk(struct rockchip_cru *cru, ulong clk_id, uint hz)
{
	int src_clk_div;

	/* i2c0,4,8 src clock from ppll, i2c1,2,3,5,6,7 src clock from gpll */
	src_clk_div = GPLL_HZ / hz;
	assert(src_clk_div - 1 < 127);

	switch (clk_id) {
	case SCLK_I2C1:
		rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(1),
			     I2C_CLK_REG_VALUE(1, src_clk_div));
		break;
	case SCLK_I2C2:
		rk_clrsetreg(&cru->clksel_con[62], I2C_CLK_REG_MASK(2),
			     I2C_CLK_REG_VALUE(2, src_clk_div));
		break;
	case SCLK_I2C3:
		rk_clrsetreg(&cru->clksel_con[63], I2C_CLK_REG_MASK(3),
			     I2C_CLK_REG_VALUE(3, src_clk_div));
		break;
	case SCLK_I2C5:
		rk_clrsetreg(&cru->clksel_con[61], I2C_CLK_REG_MASK(5),
			     I2C_CLK_REG_VALUE(5, src_clk_div));
		break;
	case SCLK_I2C6:
		rk_clrsetreg(&cru->clksel_con[62], I2C_CLK_REG_MASK(6),
			     I2C_CLK_REG_VALUE(6, src_clk_div));
		break;
	case SCLK_I2C7:
		rk_clrsetreg(&cru->clksel_con[63], I2C_CLK_REG_MASK(7),
			     I2C_CLK_REG_VALUE(7, src_clk_div));
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return rk3399_i2c_get_clk(cru, clk_id);
}

/*
 * RK3399 SPI clocks have a common divider-width (7 bits) and a single bit
 * to select either CPLL or GPLL as the clock-parent. The location within
 * the enclosing CLKSEL_CON (i.e. div_shift and sel_shift) is variable.
 */

struct spi_clkreg {
	u8 reg;  /* CLKSEL_CON[reg] register in CRU */
	u8 div_shift;
	u8 sel_shift;
};

/*
 * The entries are numbered relative to their offset from SCLK_SPI0.
 *
 * Note that SCLK_SPI3 (which is configured via PMUCRU and requires
 * different logic) is not supported.
 */
static const struct spi_clkreg spi_clkregs[] = {
	[0] = { .reg = 59,
		.div_shift = CLK_SPI0_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI0_PLL_SEL_SHIFT, },
	[1] = { .reg = 59,
		.div_shift = CLK_SPI1_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI1_PLL_SEL_SHIFT, },
	[2] = { .reg = 60,
		.div_shift = CLK_SPI2_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI2_PLL_SEL_SHIFT, },
	[3] = { .reg = 60,
		.div_shift = CLK_SPI4_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI4_PLL_SEL_SHIFT, },
	[4] = { .reg = 58,
		.div_shift = CLK_SPI5_PLL_DIV_CON_SHIFT,
		.sel_shift = CLK_SPI5_PLL_SEL_SHIFT, },
};

static ulong rk3399_spi_get_clk(struct rockchip_cru *cru, ulong clk_id)
{
	const struct spi_clkreg *spiclk = NULL;
	u32 div, val;

	switch (clk_id) {
	case SCLK_SPI0 ... SCLK_SPI5:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	val = readl(&cru->clksel_con[spiclk->reg]);
	div = bitfield_extract(val, spiclk->div_shift,
			       CLK_SPI_PLL_DIV_CON_WIDTH);

	return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3399_spi_set_clk(struct rockchip_cru *cru, ulong clk_id, uint hz)
{
	const struct spi_clkreg *spiclk = NULL;
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(GPLL_HZ, hz) - 1;
	assert(src_clk_div < 128);

	switch (clk_id) {
	case SCLK_SPI1 ... SCLK_SPI5:
		spiclk = &spi_clkregs[clk_id - SCLK_SPI0];
		break;

	default:
		pr_err("%s: SPI clk-id %ld not supported\n", __func__, clk_id);
		return -EINVAL;
	}

	rk_clrsetreg(&cru->clksel_con[spiclk->reg],
		     ((CLK_SPI_PLL_DIV_CON_MASK << spiclk->div_shift) |
		      (CLK_SPI_PLL_SEL_GPLL << spiclk->sel_shift)),
		     ((src_clk_div << spiclk->div_shift) |
		      (CLK_SPI_PLL_SEL_GPLL << spiclk->sel_shift)));

	return rk3399_spi_get_clk(cru, clk_id);
}

static ulong rk3399_vop_set_clk(struct rockchip_cru *cru, ulong clk_id, u32 hz)
{
	struct pll_div vpll_config = {0};
	int aclk_vop = 198 * MHz;
	void *aclkreg_addr, *dclkreg_addr;
	u32 div;

	switch (clk_id) {
	case DCLK_VOP0:
		aclkreg_addr = &cru->clksel_con[47];
		dclkreg_addr = &cru->clksel_con[49];
		break;
	case DCLK_VOP1:
		aclkreg_addr = &cru->clksel_con[48];
		dclkreg_addr = &cru->clksel_con[50];
		break;
	default:
		return -EINVAL;
	}
	/* vop aclk source clk: cpll */
	div = CPLL_HZ / aclk_vop;
	assert(div - 1 < 32);

	rk_clrsetreg(aclkreg_addr,
		     ACLK_VOP_PLL_SEL_MASK | ACLK_VOP_DIV_CON_MASK,
		     ACLK_VOP_PLL_SEL_CPLL << ACLK_VOP_PLL_SEL_SHIFT |
		     (div - 1) << ACLK_VOP_DIV_CON_SHIFT);

	/* vop dclk source from vpll, and equals to vpll (means div == 1) */
	if (pll_para_config(hz, &vpll_config))
		return -1;

	rkclk_set_pll(&cru->vpll_con[0], &vpll_config);

	rk_clrsetreg(dclkreg_addr,
		     DCLK_VOP_DCLK_SEL_MASK | DCLK_VOP_PLL_SEL_MASK |
		     DCLK_VOP_DIV_CON_MASK,
		     DCLK_VOP_DCLK_SEL_DIVOUT << DCLK_VOP_DCLK_SEL_SHIFT |
		     DCLK_VOP_PLL_SEL_VPLL << DCLK_VOP_PLL_SEL_SHIFT |
		     (1 - 1) << DCLK_VOP_DIV_CON_SHIFT);

	return hz;
}

static ulong rk3399_mmc_get_clk(struct rockchip_cru *cru, uint clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		con = readl(&cru->clksel_con[16]);
		/* the dwmmc controller has an internal divide-by-2 */
		div = 2;
		break;
	case SCLK_EMMC:
		con = readl(&cru->clksel_con[21]);
		div = 1;
		break;
	default:
		return -EINVAL;
	}

	div *= (con & CLK_EMMC_DIV_CON_MASK) >> CLK_EMMC_DIV_CON_SHIFT;
	if ((con & CLK_EMMC_PLL_MASK) >> CLK_EMMC_PLL_SHIFT
	    == CLK_EMMC_PLL_SEL_24M)
		return DIV_TO_RATE(OSC_HZ, div);
	else
		return DIV_TO_RATE(GPLL_HZ, div);
}

static ulong rk3399_mmc_set_clk(struct rockchip_cru *cru,
				ulong clk_id, ulong set_rate)
{
	int src_clk_div;
	int aclk_emmc = 198 * MHz;

	switch (clk_id) {
	case HCLK_SDMMC:
	case SCLK_SDMMC:
		/* Select clk_sdmmc source from GPLL by default */
		/* mmc clock defaults to an internal div 2, so provide double the rate in the cru */
		src_clk_div = DIV_ROUND_UP(GPLL_HZ / 2, set_rate);

		if (src_clk_div > 128) {
			/* use 24MHz source for 400KHz clock */
			src_clk_div = DIV_ROUND_UP(OSC_HZ / 2, set_rate);
			assert(src_clk_div - 1 < 128);
			rk_clrsetreg(&cru->clksel_con[16],
				     CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
				     CLK_EMMC_PLL_SEL_24M << CLK_EMMC_PLL_SHIFT |
				     (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
		} else {
			rk_clrsetreg(&cru->clksel_con[16],
				     CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
				     CLK_EMMC_PLL_SEL_GPLL << CLK_EMMC_PLL_SHIFT |
				     (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
		}
		break;
	case SCLK_EMMC:
		/* Select aclk_emmc source from GPLL */
		src_clk_div = DIV_ROUND_UP(GPLL_HZ, aclk_emmc);
		assert(src_clk_div - 1 < 32);

		rk_clrsetreg(&cru->clksel_con[21],
			     ACLK_EMMC_PLL_SEL_MASK | ACLK_EMMC_DIV_CON_MASK,
			     ACLK_EMMC_PLL_SEL_GPLL << ACLK_EMMC_PLL_SEL_SHIFT |
			     (src_clk_div - 1) << ACLK_EMMC_DIV_CON_SHIFT);

		/* Select clk_emmc source from GPLL too */
		src_clk_div = DIV_ROUND_UP(GPLL_HZ, set_rate);
		assert(src_clk_div - 1 < 128);

		rk_clrsetreg(&cru->clksel_con[22],
			     CLK_EMMC_PLL_MASK | CLK_EMMC_DIV_CON_MASK,
			     CLK_EMMC_PLL_SEL_GPLL << CLK_EMMC_PLL_SHIFT |
			     (src_clk_div - 1) << CLK_EMMC_DIV_CON_SHIFT);
		break;
	default:
		return -EINVAL;
	}
	return rk3399_mmc_get_clk(cru, clk_id);
}

static ulong rk3399_gmac_set_clk(struct rockchip_cru *cru, ulong rate)
{
	ulong ret;

	/*
	 * The RGMII CLK can either be derived from an external "clkin"
	 * or be generated internally by a divider from SCLK_MAC.
	 */
	if (readl(&cru->clksel_con[19]) & BIT(4)) {
		/* An external clock will always generate the right rate... */
		ret = rate;
	} else {
		/*
		 * No platform uses an internal clock to date.
		 * Implement this once it becomes necessary and print an error
		 * if someone tries to use it (while it remains unimplemented).
		 */
		pr_err("%s: internal clock is UNIMPLEMENTED\n", __func__);
		ret = 0;
	}

	return ret;
}

#define PMUSGRF_DDR_RGN_CON16 0xff330040
static ulong rk3399_ddr_set_clk(struct rockchip_cru *cru,
				ulong set_rate)
{
	struct pll_div dpll_cfg;

	/* IC ECO bug, need to set this register */
	writel(0xc000c000, PMUSGRF_DDR_RGN_CON16);

	/* clk_ddrc == DPLL = 24MHz / refdiv * fbdiv / postdiv1 / postdiv2 */
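	/*
	 * Illustrative cross-check of the table below using that formula:
	 * the 800 MHz entry gives 24 MHz / 1 * 100 / 3 / 1 = 800 MHz.
	 */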
	switch (set_rate) {
	case 50 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 12, .postdiv1 = 3, .postdiv2 = 2};
		break;
	case 200 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 50, .postdiv1 = 6, .postdiv2 = 1};
		break;
	case 300 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 2, .fbdiv = 100, .postdiv1 = 4, .postdiv2 = 1};
		break;
	case 400 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 50, .postdiv1 = 3, .postdiv2 = 1};
		break;
	case 666 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 2, .fbdiv = 111, .postdiv1 = 2, .postdiv2 = 1};
		break;
	case 800 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 100, .postdiv1 = 3, .postdiv2 = 1};
		break;
	case 933 * MHz:
		dpll_cfg = (struct pll_div)
		{.refdiv = 1, .fbdiv = 116, .postdiv1 = 3, .postdiv2 = 1};
		break;
	default:
		pr_err("Unsupported SDRAM frequency %ld!\n", set_rate);
		return -EINVAL;
	}
	rkclk_set_pll(&cru->dpll_con[0], &dpll_cfg);

	return set_rate;
}

static ulong rk3399_saradc_get_clk(struct rockchip_cru *cru)
{
	u32 div, val;

	val = readl(&cru->clksel_con[26]);
	div = bitfield_extract(val, CLK_SARADC_DIV_CON_SHIFT,
			       CLK_SARADC_DIV_CON_WIDTH);

	return DIV_TO_RATE(OSC_HZ, div);
}

static ulong rk3399_saradc_set_clk(struct rockchip_cru *cru, uint hz)
{
	int src_clk_div;

	src_clk_div = DIV_ROUND_UP(OSC_HZ, hz) - 1;
	assert(src_clk_div < 128);

	rk_clrsetreg(&cru->clksel_con[26],
		     CLK_SARADC_DIV_CON_MASK,
		     src_clk_div << CLK_SARADC_DIV_CON_SHIFT);

	return rk3399_saradc_get_clk(cru);
}

static ulong rk3399_clk_get_rate(struct clk *clk)
{
	struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
	ulong rate = 0;

	switch (clk->id) {
	case 0 ... 63:
		return 0;
	case HCLK_SDMMC:
	case SCLK_SDMMC:
	case SCLK_EMMC:
		rate = rk3399_mmc_get_clk(priv->cru, clk->id);
		break;
	case SCLK_I2C1:
	case SCLK_I2C2:
	case SCLK_I2C3:
	case SCLK_I2C5:
	case SCLK_I2C6:
	case SCLK_I2C7:
		rate = rk3399_i2c_get_clk(priv->cru, clk->id);
		break;
	case SCLK_SPI0...SCLK_SPI5:
		rate = rk3399_spi_get_clk(priv->cru, clk->id);
		break;
	case SCLK_UART0:
	case SCLK_UART1:
	case SCLK_UART2:
	case SCLK_UART3:
		return 24000000;
	case PCLK_HDMI_CTRL:
		break;
	case DCLK_VOP0:
	case DCLK_VOP1:
		break;
	case PCLK_EFUSE1024NS:
		break;
	case SCLK_SARADC:
		rate = rk3399_saradc_get_clk(priv->cru);
		break;
	case ACLK_VIO:
	case ACLK_HDCP:
	case ACLK_GIC_PRE:
	case PCLK_DDR:
		break;
	default:
		log_debug("Unknown clock %lu\n", clk->id);
		return -ENOENT;
	}

	return rate;
}

static ulong rk3399_clk_set_rate(struct clk *clk, ulong rate)
{
	struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
	ulong ret = 0;

	switch (clk->id) {
	case 0 ... 63:
		return 0;

	case ACLK_PERIHP:
	case HCLK_PERIHP:
	case PCLK_PERIHP:
		return 0;

	case ACLK_PERILP0:
	case HCLK_PERILP0:
	case PCLK_PERILP0:
		return 0;

	case ACLK_CCI:
		return 0;

	case HCLK_PERILP1:
	case PCLK_PERILP1:
		return 0;

	case HCLK_SDMMC:
	case SCLK_SDMMC:
	case SCLK_EMMC:
		ret = rk3399_mmc_set_clk(priv->cru, clk->id, rate);
		break;
	case SCLK_MAC:
		ret = rk3399_gmac_set_clk(priv->cru, rate);
		break;
	case SCLK_I2C1:
	case SCLK_I2C2:
	case SCLK_I2C3:
	case SCLK_I2C5:
	case SCLK_I2C6:
	case SCLK_I2C7:
		ret = rk3399_i2c_set_clk(priv->cru, clk->id, rate);
		break;
	case SCLK_SPI0...SCLK_SPI5:
		ret = rk3399_spi_set_clk(priv->cru, clk->id, rate);
		break;
	case PCLK_HDMI_CTRL:
	case PCLK_VIO_GRF:
		/* the PCLK gates for video are enabled by default */
		break;
	case DCLK_VOP0:
	case DCLK_VOP1:
		ret = rk3399_vop_set_clk(priv->cru, clk->id, rate);
		break;
	case ACLK_VOP1:
	case HCLK_VOP1:
	case HCLK_SD:
		/*
		 * assigned-clocks handling is not required for vopl, so
		 * return 0 to satisfy clk_set_defaults() during device probe.
		 */
		return 0;
	case SCLK_DDRCLK:
		ret = rk3399_ddr_set_clk(priv->cru, rate);
		break;
	case PCLK_EFUSE1024NS:
		break;
	case SCLK_SARADC:
		ret = rk3399_saradc_set_clk(priv->cru, rate);
		break;
	case ACLK_VIO:
	case ACLK_HDCP:
	case ACLK_GIC_PRE:
	case PCLK_DDR:
		return 0;
	default:
		log_debug("Unknown clock %lu\n", clk->id);
		return -ENOENT;
	}

	return ret;
}

static int __maybe_unused rk3399_gmac_set_parent(struct clk *clk,
						 struct clk *parent)
{
	struct rk3399_clk_priv *priv = dev_get_priv(clk->dev);
	const char *clock_output_name;
	int ret;

	/*
	 * If the requested parent is in the same clock-controller and
	 * the id is SCLK_MAC ("clk_gmac"), switch to the internal clock.
	 */
	if (parent->dev == clk->dev && parent->id == SCLK_MAC) {
		debug("%s: switching RGMII to SCLK_MAC\n", __func__);
		rk_clrreg(&priv->cru->clksel_con[19], BIT(4));
		return 0;
	}

	/*
	 * Otherwise, we need to check the clock-output-names of the
	 * requested parent to see if the requested id is "clkin_gmac".
	 */
	ret = dev_read_string_index(parent->dev, "clock-output-names",
				    parent->id, &clock_output_name);
	if (ret < 0)
		return -ENODATA;

	/* If this is "clkin_gmac", switch to the external clock input */
	if (!strcmp(clock_output_name, "clkin_gmac")) {
		debug("%s: switching RGMII to CLKIN\n", __func__);
		rk_setreg(&priv->cru->clksel_con[19], BIT(4));
		return 0;
	}

	return -EINVAL;
}

static int __maybe_unused rk3399_clk_set_parent(struct clk *clk,
						struct clk *parent)
{
	switch (clk->id) {
	case SCLK_RMII_SRC:
		return rk3399_gmac_set_parent(clk, parent);
	}

	debug("%s: unsupported clk %ld\n", __func__, clk->id);
	return -ENOENT;
}

static struct clk_ops rk3399_clk_ops = {
	.get_rate = rk3399_clk_get_rate,
	.set_rate = rk3399_clk_set_rate,
#if CONFIG_IS_ENABLED(OF_CONTROL) && !CONFIG_IS_ENABLED(OF_PLATDATA)
	.set_parent = rk3399_clk_set_parent,
#endif
};

#ifdef CONFIG_SPL_BUILD
static void rkclk_init(struct rockchip_cru *cru)
{
	u32 aclk_div;
	u32 hclk_div;
	u32 pclk_div;

	rk3399_configure_cpu_l(cru, APLL_L_600_MHZ);
	rk3399_configure_cpu_b(cru, APLL_B_600_MHZ);
	/*
	 * Some CRU registers are changed by the bootrom; reset them to the
	 * reset/default values described in the TRM to avoid confusing the
	 * kernel. Consider these three lines a fix for that bootrom bug.
	 */
	rk_clrsetreg(&cru->clksel_con[12], 0xffff, 0x4101);
	rk_clrsetreg(&cru->clksel_con[19], 0xffff, 0x033f);
	rk_clrsetreg(&cru->clksel_con[56], 0x0003, 0x0003);

	/* configure gpll cpll */
	rkclk_set_pll(&cru->gpll_con[0], &gpll_init_cfg);
	rkclk_set_pll(&cru->cpll_con[0], &cpll_init_cfg);

	/* configure perihp aclk, hclk, pclk */
	aclk_div = GPLL_HZ / PERIHP_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERIHP_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = PERIHP_ACLK_HZ / PERIHP_HCLK_HZ - 1;
	assert((hclk_div + 1) * PERIHP_HCLK_HZ ==
	       PERIHP_ACLK_HZ && (hclk_div < 0x4));

	pclk_div = PERIHP_ACLK_HZ / PERIHP_PCLK_HZ - 1;
	assert((pclk_div + 1) * PERIHP_PCLK_HZ ==
	       PERIHP_ACLK_HZ && (pclk_div < 0x7));

	rk_clrsetreg(&cru->clksel_con[14],
		     PCLK_PERIHP_DIV_CON_MASK | HCLK_PERIHP_DIV_CON_MASK |
		     ACLK_PERIHP_PLL_SEL_MASK | ACLK_PERIHP_DIV_CON_MASK,
		     pclk_div << PCLK_PERIHP_DIV_CON_SHIFT |
		     hclk_div << HCLK_PERIHP_DIV_CON_SHIFT |
		     ACLK_PERIHP_PLL_SEL_GPLL << ACLK_PERIHP_PLL_SEL_SHIFT |
		     aclk_div << ACLK_PERIHP_DIV_CON_SHIFT);

	/* configure perilp0 aclk, hclk, pclk */
	aclk_div = GPLL_HZ / PERILP0_ACLK_HZ - 1;
	assert((aclk_div + 1) * PERILP0_ACLK_HZ == GPLL_HZ && aclk_div < 0x1f);

	hclk_div = PERILP0_ACLK_HZ / PERILP0_HCLK_HZ - 1;
	assert((hclk_div + 1) * PERILP0_HCLK_HZ ==
	       PERILP0_ACLK_HZ && (hclk_div < 0x4));

	pclk_div = PERILP0_ACLK_HZ / PERILP0_PCLK_HZ - 1;
	assert((pclk_div + 1) * PERILP0_PCLK_HZ ==
	       PERILP0_ACLK_HZ && (pclk_div < 0x7));

	rk_clrsetreg(&cru->clksel_con[23],
		     PCLK_PERILP0_DIV_CON_MASK | HCLK_PERILP0_DIV_CON_MASK |
		     ACLK_PERILP0_PLL_SEL_MASK | ACLK_PERILP0_DIV_CON_MASK,
		     pclk_div << PCLK_PERILP0_DIV_CON_SHIFT |
		     hclk_div << HCLK_PERILP0_DIV_CON_SHIFT |
		     ACLK_PERILP0_PLL_SEL_GPLL << ACLK_PERILP0_PLL_SEL_SHIFT |
		     aclk_div << ACLK_PERILP0_DIV_CON_SHIFT);

	/* perilp1 hclk select gpll as source */
	hclk_div = GPLL_HZ / PERILP1_HCLK_HZ - 1;
	assert((hclk_div + 1) * PERILP1_HCLK_HZ ==
	       GPLL_HZ && (hclk_div < 0x1f));

	pclk_div = PERILP1_HCLK_HZ / PERILP1_PCLK_HZ - 1;
	assert((pclk_div + 1) * PERILP1_PCLK_HZ ==
	       PERILP1_HCLK_HZ && (pclk_div < 0x7));

	rk_clrsetreg(&cru->clksel_con[25],
		     PCLK_PERILP1_DIV_CON_MASK | HCLK_PERILP1_DIV_CON_MASK |
		     HCLK_PERILP1_PLL_SEL_MASK,
		     pclk_div << PCLK_PERILP1_DIV_CON_SHIFT |
		     hclk_div << HCLK_PERILP1_DIV_CON_SHIFT |
		     HCLK_PERILP1_PLL_SEL_GPLL << HCLK_PERILP1_PLL_SEL_SHIFT);
}
#endif

static int rk3399_clk_probe(struct udevice *dev)
{
#ifdef CONFIG_SPL_BUILD
	struct rk3399_clk_priv *priv = dev_get_priv(dev);

#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_clk_plat *plat = dev_get_platdata(dev);

	priv->cru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
#endif
	rkclk_init(priv->cru);
#endif
	return 0;
}

static int rk3399_clk_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_clk_priv *priv = dev_get_priv(dev);

	priv->cru = dev_read_addr_ptr(dev);
#endif
	return 0;
}

static int rk3399_clk_bind(struct udevice *dev)
{
	int ret;
	struct udevice *sys_child;
	struct sysreset_reg *priv;

	/* The reset driver does not have a device node, so bind it here */
	ret = device_bind_driver(dev, "rockchip_sysreset", "sysreset",
				 &sys_child);
	if (ret) {
		debug("Warning: No sysreset driver: ret=%d\n", ret);
	} else {
		priv = malloc(sizeof(struct sysreset_reg));
		priv->glb_srst_fst_value = offsetof(struct rockchip_cru,
						    glb_srst_fst_value);
		priv->glb_srst_snd_value = offsetof(struct rockchip_cru,
						    glb_srst_snd_value);
		sys_child->priv = priv;
	}

#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
	ret = offsetof(struct rockchip_cru, softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 21);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif

	return 0;
}

static const struct udevice_id rk3399_clk_ids[] = {
	{ .compatible = "rockchip,rk3399-cru" },
	{ }
};

U_BOOT_DRIVER(clk_rk3399) = {
	.name = "rockchip_rk3399_cru",
	.id = UCLASS_CLK,
	.of_match = rk3399_clk_ids,
	.priv_auto_alloc_size = sizeof(struct rk3399_clk_priv),
	.ofdata_to_platdata = rk3399_clk_ofdata_to_platdata,
	.ops = &rk3399_clk_ops,
	.bind = rk3399_clk_bind,
	.probe = rk3399_clk_probe,
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	.platdata_auto_alloc_size = sizeof(struct rk3399_clk_plat),
#endif
};

static ulong rk3399_i2c_get_pmuclk(struct rk3399_pmucru *pmucru, ulong clk_id)
{
	u32 div, con;

	switch (clk_id) {
	case SCLK_I2C0_PMU:
		con = readl(&pmucru->pmucru_clksel[2]);
		div = I2C_CLK_DIV_VALUE(con, 0);
		break;
	case SCLK_I2C4_PMU:
		con = readl(&pmucru->pmucru_clksel[3]);
		div = I2C_CLK_DIV_VALUE(con, 4);
		break;
	case SCLK_I2C8_PMU:
		con = readl(&pmucru->pmucru_clksel[2]);
		div = I2C_CLK_DIV_VALUE(con, 8);
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PPLL_HZ, div);
}

static ulong rk3399_i2c_set_pmuclk(struct rk3399_pmucru *pmucru, ulong clk_id,
				   uint hz)
{
	int src_clk_div;

	src_clk_div = PPLL_HZ / hz;
	assert(src_clk_div - 1 < 127);

	switch (clk_id) {
	case SCLK_I2C0_PMU:
		rk_clrsetreg(&pmucru->pmucru_clksel[2], I2C_PMUCLK_REG_MASK(0),
			     I2C_PMUCLK_REG_VALUE(0, src_clk_div));
		break;
	case SCLK_I2C4_PMU:
		rk_clrsetreg(&pmucru->pmucru_clksel[3], I2C_PMUCLK_REG_MASK(4),
			     I2C_PMUCLK_REG_VALUE(4, src_clk_div));
		break;
	case SCLK_I2C8_PMU:
		rk_clrsetreg(&pmucru->pmucru_clksel[2], I2C_PMUCLK_REG_MASK(8),
			     I2C_PMUCLK_REG_VALUE(8, src_clk_div));
		break;
	default:
		printf("unsupported i2c bus\n");
		return -EINVAL;
	}

	return DIV_TO_RATE(PPLL_HZ, src_clk_div);
}

static ulong rk3399_pwm_get_clk(struct rk3399_pmucru *pmucru)
{
	u32 div, con;

	/* the PWM clock rate is the same as pclk_pmu */
	con = readl(&pmucru->pmucru_clksel[0]);
	div = con & PMU_PCLK_DIV_CON_MASK;

	return DIV_TO_RATE(PPLL_HZ, div);
}

static ulong rk3399_pmuclk_get_rate(struct clk *clk)
{
	struct rk3399_pmuclk_priv *priv = dev_get_priv(clk->dev);
	ulong rate = 0;

	switch (clk->id) {
	case PLL_PPLL:
		return PPLL_HZ;
	case PCLK_RKPWM_PMU:
		rate = rk3399_pwm_get_clk(priv->pmucru);
		break;
	case SCLK_I2C0_PMU:
	case SCLK_I2C4_PMU:
	case SCLK_I2C8_PMU:
		rate = rk3399_i2c_get_pmuclk(priv->pmucru, clk->id);
		break;
	default:
		return -ENOENT;
	}

	return rate;
}

static ulong rk3399_pmuclk_set_rate(struct clk *clk, ulong rate)
{
	struct rk3399_pmuclk_priv *priv = dev_get_priv(clk->dev);
	ulong ret = 0;

	switch (clk->id) {
	case PLL_PPLL:
		/*
		 * This has already been set up and we don't want/need
		 * to change it here. Accept the request though, as the
		 * device-tree has this in an 'assigned-clocks' list.
		 */
		return PPLL_HZ;
	case SCLK_I2C0_PMU:
	case SCLK_I2C4_PMU:
	case SCLK_I2C8_PMU:
		ret = rk3399_i2c_set_pmuclk(priv->pmucru, clk->id, rate);
		break;
	default:
		return -ENOENT;
	}

	return ret;
}

static struct clk_ops rk3399_pmuclk_ops = {
	.get_rate = rk3399_pmuclk_get_rate,
	.set_rate = rk3399_pmuclk_set_rate,
};

#ifndef CONFIG_SPL_BUILD
static void pmuclk_init(struct rk3399_pmucru *pmucru)
{
	u32 pclk_div;

	/* configure pmu pll (ppll) */
	rkclk_set_pll(&pmucru->ppll_con[0], &ppll_init_cfg);

	/* configure pmu pclk */
	pclk_div = PPLL_HZ / PMU_PCLK_HZ - 1;
	rk_clrsetreg(&pmucru->pmucru_clksel[0],
		     PMU_PCLK_DIV_CON_MASK,
		     pclk_div << PMU_PCLK_DIV_CON_SHIFT);
}
#endif

static int rk3399_pmuclk_probe(struct udevice *dev)
{
#if CONFIG_IS_ENABLED(OF_PLATDATA) || !defined(CONFIG_SPL_BUILD)
	struct rk3399_pmuclk_priv *priv = dev_get_priv(dev);
#endif

#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_pmuclk_plat *plat = dev_get_platdata(dev);

	priv->pmucru = map_sysmem(plat->dtd.reg[0], plat->dtd.reg[1]);
#endif

#ifndef CONFIG_SPL_BUILD
	pmuclk_init(priv->pmucru);
#endif
	return 0;
}

static int rk3399_pmuclk_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3399_pmuclk_priv *priv = dev_get_priv(dev);

	priv->pmucru = dev_read_addr_ptr(dev);
#endif
	return 0;
}

static int rk3399_pmuclk_bind(struct udevice *dev)
{
#if CONFIG_IS_ENABLED(RESET_ROCKCHIP)
	int ret;

	ret = offsetof(struct rk3399_pmucru, pmucru_softrst_con[0]);
	ret = rockchip_reset_bind(dev, ret, 2);
	if (ret)
		debug("Warning: software reset driver bind failed\n");
#endif
	return 0;
}

static const struct udevice_id rk3399_pmuclk_ids[] = {
	{ .compatible = "rockchip,rk3399-pmucru" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3399_pmuclk) = {
	.name = "rockchip_rk3399_pmucru",
	.id = UCLASS_CLK,
	.of_match = rk3399_pmuclk_ids,
	.priv_auto_alloc_size = sizeof(struct rk3399_pmuclk_priv),
	.ofdata_to_platdata = rk3399_pmuclk_ofdata_to_platdata,
	.ops = &rk3399_pmuclk_ops,
	.probe = rk3399_pmuclk_probe,
	.bind = rk3399_pmuclk_bind,
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	.platdata_auto_alloc_size = sizeof(struct rk3399_pmuclk_plat),
#endif
};