// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd.
 */
#include <common.h>
#include <clk.h>
#include <debug_uart.h>
#include <dm.h>
#include <dt-structs.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru_rk3328.h>
#include <asm/arch-rockchip/grf_rk3328.h>
#include <asm/arch-rockchip/sdram.h>
#include <asm/arch-rockchip/sdram_rk3328.h>
#include <asm/arch-rockchip/uart.h>

struct dram_info {
#ifdef CONFIG_TPL_BUILD
	struct rk3328_ddr_pctl_regs *pctl;
	struct rk3328_ddr_phy_regs *phy;
	struct clk ddr_clk;
	struct rk3328_cru *cru;
	struct rk3328_msch_regs *msch;
	struct rk3328_ddr_grf_regs *ddr_grf;
#endif
	struct ram_info info;
	struct rk3328_grf_regs *grf;
};

#ifdef CONFIG_TPL_BUILD

struct rk3328_sdram_channel sdram_ch;

struct rockchip_dmc_plat {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3328_dmc dtplat;
#else
	struct rk3328_sdram_params sdram_params;
#endif
	struct regmap *map;
};

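/*
 * With OF_PLATDATA enabled, dtoc has already turned the dmc node into
 * the dtd_rockchip_rk3328_dmc struct, so no device tree parsing happens
 * at run time: dtplat->reg holds the (base, size) pairs for the six
 * register ranges (hence the ARRAY_SIZE / 2 below), and
 * regmap_init_mem_platdata() wraps them in the regmap that
 * rk3328_dmc_init() later unpacks with regmap_get_range().
 */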
#if CONFIG_IS_ENABLED(OF_PLATDATA)
static int conv_of_platdata(struct udevice *dev)
{
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
	int ret;

	ret = regmap_init_mem_platdata(dev, dtplat->reg,
				       ARRAY_SIZE(dtplat->reg) / 2,
				       &plat->map);
	if (ret)
		return ret;

	return 0;
}
#endif

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(ddrctrl_srstn_req(ctl_srstn) | ddrctrl_psrstn_req(ctl_psrstn) |
	       ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
	       &dram->cru->softrst_con[5]);
	writel(ddrctrl_asrstn_req(ctl_srstn), &dram->cru->softrst_con[9]);
}

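/*
 * The DPLL output is 24 MHz * fbdiv / (refdiv * postdiv1 * postdiv2);
 * the ladder below picks postdiv1/postdiv2 for the requested rate and
 * then solves for fbdiv. Worked example (illustrative): mhz = 800
 * selects postdiv1 = 3, postdiv2 = 1, so
 * fbdiv = 800 * 1 * 3 * 1 / 24 = 100, and the output is
 * 24 MHz * 100 / (1 * 3 * 1) = 800 MHz.
 */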
static void rkclk_set_dpll(struct dram_info *dram, unsigned int mhz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;

	refdiv = 1;
	if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else if (mhz <= 600) {
		postdiv1 = 4;
		postdiv2 = 1;
	} else if (mhz <= 800) {
		postdiv1 = 3;
		postdiv2 = 1;
	} else if (mhz <= 1600) {
		postdiv1 = 2;
		postdiv2 = 1;
	} else {
		postdiv1 = 1;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(((0x1 << 4) << 16) | (0 << 4), &dram->cru->mode_con);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->dpll_con[0]);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->dpll_con[1]);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->dpll_con[1])))
			break;
		delay--;
	}

	writel(((0x1 << 4) << 16) | (1 << 4), &dram->cru->mode_con);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rk3328_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;

	/* choose DPLL as the DDR clock source */
	clrbits_le32(PHY_REG(phy_base, 0xef), 1 << 7);

	/* the Inno DDR PHY needs 2 * the DRAM frequency */
	rkclk_set_dpll(dram, sdram_params->ddr_freq * 2);
}

static void phy_soft_reset(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0), 0x3 << 2);
	udelay(1);
	setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET);
	udelay(5);
	setbits_le32(PHY_REG(phy_base, 0), DIGITAL_DERESET);
	udelay(1);
}

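/*
 * The controller and PHY settings arrive as tables of
 * { register offset, value } pairs terminated by an offset of
 * 0xFFFFFFFF; pctl_cfg() below (and phy_cfg() for the PHY) simply
 * replays the table into the hardware. Illustrative shape only, the
 * offsets/values here are made up:
 *
 *	pctl[0] = { 0x0000, 0x43040008 };	// e.g. MSTR
 *	pctl[1] = { 0x0030, 0x00000000 };	// e.g. PWRCTL
 *	pctl[2] = { 0xffffffff, 0xffffffff };	// terminator
 */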
static int pctl_cfg(struct dram_info *dram,
		    struct rk3328_sdram_params *sdram_params)
{
	u32 i;
	void __iomem *pctl_base = dram->pctl;

	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->pctl_regs.pctl[i][1],
		       pctl_base + sdram_params->pctl_regs.pctl[i][0]);
	}
	clrsetbits_le32(pctl_base + DDR_PCTL2_PWRTMG,
			(0xff << 16) | 0x1f,
			((SR_IDLE & 0xff) << 16) | (PD_IDLE & 0x1f));
	/*
	 * dfi_lp_en_pd = 1, dfi_lp_wakeup_pd = 2,
	 * hw_lp_idle_x32 = 1
	 */
	if (sdram_params->dramtype == LPDDR3) {
		setbits_le32(pctl_base + DDR_PCTL2_DFILPCFG0, 1);
		clrsetbits_le32(pctl_base + DDR_PCTL2_DFILPCFG0,
				0xf << 4,
				2 << 4);
	}
	clrsetbits_le32(pctl_base + DDR_PCTL2_HWLPCTL,
			0xfff << 16,
			1 << 16);
	/* disable zqcs */
	setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
	setbits_le32(pctl_base + 0x2000 + DDR_PCTL2_ZQCTL0, 1u << 31);

	return 0;
}

/*
 * Return the ddrconfig value:
 *	(-1): no matching ddrconfig was found
 *	other: the ddrconfig value
 * Only cs0_row >= cs1_row is supported.
 */
static unsigned int calculate_ddrconfig(struct rk3328_sdram_params *sdram_params)
{
	static const u16 ddr_cfg_2_rbc[] = {
		/***************************
		 * [5:4] row(13+n)
		 * [3] cs(0: 1 cs, 1: 2 cs)
		 * [2] bank(0: 4 banks, 1: 8 banks)
		 * [1:0] col(11+n)
		 ****************************/
		/* row, cs, bank, col */
		((3 << 4) | (0 << 3) | (1 << 2) | 0),
		((3 << 4) | (0 << 3) | (1 << 2) | 1),
		((2 << 4) | (0 << 3) | (1 << 2) | 2),
		((3 << 4) | (0 << 3) | (1 << 2) | 2),
		((2 << 4) | (0 << 3) | (1 << 2) | 3),
		((3 << 4) | (1 << 3) | (1 << 2) | 0),
		((3 << 4) | (1 << 3) | (1 << 2) | 1),
		((2 << 4) | (1 << 3) | (1 << 2) | 2),
		((3 << 4) | (0 << 3) | (0 << 2) | 1),
		((2 << 4) | (0 << 3) | (1 << 2) | 1),
	};

	static const u16 ddr4_cfg_2_rbc[] = {
		/***************************
		 * [6] cs(0: 1 cs, 1: 2 cs)
		 * [5:3] row(13+n)
		 * [2] cs(0: 1 cs, 1: 2 cs)
		 * [1] bw(0: 16-bit, 1: 32-bit)
		 * [0] diebw(0: 8-bit, 1: 16-bit)
		 ***************************/
		/* cs, row, cs, bw, diebw */
		((0 << 6) | (3 << 3) | (0 << 2) | (1 << 1) | 0),
		((1 << 6) | (2 << 3) | (0 << 2) | (1 << 1) | 0),
		((0 << 6) | (4 << 3) | (0 << 2) | (0 << 1) | 0),
		((1 << 6) | (3 << 3) | (0 << 2) | (0 << 1) | 0),
		((0 << 6) | (4 << 3) | (0 << 2) | (1 << 1) | 1),
		((1 << 6) | (3 << 3) | (0 << 2) | (1 << 1) | 1),
		((1 << 6) | (4 << 3) | (0 << 2) | (0 << 1) | 1),
		((0 << 6) | (2 << 3) | (1 << 2) | (1 << 1) | 0),
		((0 << 6) | (3 << 3) | (1 << 2) | (0 << 1) | 0),
		((0 << 6) | (3 << 3) | (1 << 2) | (1 << 1) | 1),
		((0 << 6) | (4 << 3) | (1 << 2) | (0 << 1) | 1),
	};

	u32 cs, bw, die_bw, col, row, bank;
	u32 i, tmp;
	u32 ddrconf = -1;

	cs = sdram_ch.rank;
	bw = sdram_ch.bw;
	die_bw = sdram_ch.dbw;
	col = sdram_ch.col;
	row = sdram_ch.cs0_row;
	bank = sdram_ch.bk;

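	/*
	 * Match the probed geometry against the encodings above. Worked
	 * example for the non-DDR4 branch (illustrative values): bw = 2
	 * (32-bit), col = 10, 8 banks, row = 15 gives
	 * tmp = ((15 - 13) << 4) | (1 << 2) | (2 + 10 - 11) = 0x25; its
	 * low nibble equals that of ddr_cfg_2_rbc[1] = 0x35 and its row
	 * field (0x20) fits under the entry's (0x30), so ddrconf = 1.
	 */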
	if (sdram_params->dramtype == DDR4) {
		tmp = ((cs - 1) << 6) | ((row - 13) << 3) | (bw & 0x2) | die_bw;
		for (i = 10; i < 17; i++) {
			if (((tmp & 0x7) == (ddr4_cfg_2_rbc[i - 10] & 0x7)) &&
			    ((tmp & 0x3c) <= (ddr4_cfg_2_rbc[i - 10] & 0x3c)) &&
			    ((tmp & 0x40) <= (ddr4_cfg_2_rbc[i - 10] & 0x40))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (bank == 2) {
			ddrconf = 8;
			goto out;
		}

		tmp = ((row - 13) << 4) | (1 << 2) | ((bw + col - 11) << 0);
		for (i = 0; i < 5; i++)
			if (((tmp & 0xf) == (ddr_cfg_2_rbc[i] & 0xf)) &&
			    ((tmp & 0x30) <= (ddr_cfg_2_rbc[i] & 0x30))) {
				ddrconf = i;
				goto out;
			}
	}

out:
	if (ddrconf > 20)
		printf("calculate_ddrconfig error\n");

	return ddrconf;
}

/* n: size in bytes */
static void copy_to_reg(u32 *dest, u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

/*
 * Calculate the controller DRAM address map and write it to the
 * registers. sdram_ch.ddrconfig must hold a valid value before this
 * function is called.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct rk3328_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;

	copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
		    &addrmap[sdram_ch.ddrconfig][0], 9 * 4);
	if (sdram_params->dramtype == LPDDR3 && sdram_ch.row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->dramtype == DDR4 && sdram_ch.bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (sdram_ch.rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

static void phy_dll_bypass_set(struct dram_info *dram, u32 freq)
{
	u32 tmp;
	void __iomem *phy_base = dram->phy;

	setbits_le32(PHY_REG(phy_base, 0x13), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x14), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x26), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x27), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x36), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x37), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x46), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x47), 1 << 3);
	setbits_le32(PHY_REG(phy_base, 0x56), 1 << 4);
	clrbits_le32(PHY_REG(phy_base, 0x57), 1 << 3);

	if (freq <= 400)
		/* DLL bypass */
		setbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);
	else
		clrbits_le32(PHY_REG(phy_base, 0xa4), 0x1f);
	if (freq <= 680)
		tmp = 2;
	else
		tmp = 1;
	writel(tmp, PHY_REG(phy_base, 0x28));
	writel(tmp, PHY_REG(phy_base, 0x38));
	writel(tmp, PHY_REG(phy_base, 0x48));
	writel(tmp, PHY_REG(phy_base, 0x58));
}

static void set_ds_odt(struct dram_info *dram,
		       struct rk3328_sdram_params *sdram_params)
{
	u32 cmd_drv, clk_drv, dqs_drv, dqs_odt;
	void __iomem *phy_base = dram->phy;

	if (sdram_params->dramtype == DDR3) {
		cmd_drv = PHY_DDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR3_RON_RTT_45ohm;
		dqs_drv = PHY_DDR3_RON_RTT_34ohm;
		dqs_odt = PHY_DDR3_RON_RTT_225ohm;
	} else {
		cmd_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		clk_drv = PHY_DDR4_LPDDR3_RON_RTT_43ohm;
		dqs_drv = PHY_DDR4_LPDDR3_RON_RTT_34ohm;
		dqs_odt = PHY_DDR4_LPDDR3_RON_RTT_240ohm;
	}
	/* DS */
	writel(cmd_drv, PHY_REG(phy_base, 0x11));
	clrsetbits_le32(PHY_REG(phy_base, 0x12), 0x1f << 3, cmd_drv << 3);
	writel(clk_drv, PHY_REG(phy_base, 0x16));
	writel(clk_drv, PHY_REG(phy_base, 0x18));
	writel(dqs_drv, PHY_REG(phy_base, 0x20));
	writel(dqs_drv, PHY_REG(phy_base, 0x2f));
	writel(dqs_drv, PHY_REG(phy_base, 0x30));
	writel(dqs_drv, PHY_REG(phy_base, 0x3f));
	writel(dqs_drv, PHY_REG(phy_base, 0x40));
	writel(dqs_drv, PHY_REG(phy_base, 0x4f));
	writel(dqs_drv, PHY_REG(phy_base, 0x50));
	writel(dqs_drv, PHY_REG(phy_base, 0x5f));
	/* ODT */
	writel(dqs_odt, PHY_REG(phy_base, 0x21));
	writel(dqs_odt, PHY_REG(phy_base, 0x2e));
	writel(dqs_odt, PHY_REG(phy_base, 0x31));
	writel(dqs_odt, PHY_REG(phy_base, 0x3e));
	writel(dqs_odt, PHY_REG(phy_base, 0x41));
	writel(dqs_odt, PHY_REG(phy_base, 0x4e));
	writel(dqs_odt, PHY_REG(phy_base, 0x51));
	writel(dqs_odt, PHY_REG(phy_base, 0x5e));
}

static void phy_cfg(struct dram_info *dram,
		    struct rk3328_sdram_params *sdram_params)
{
	u32 i;
	void __iomem *phy_base = dram->phy;

	phy_dll_bypass_set(dram, sdram_params->ddr_freq);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}
	if (sdram_ch.bw == 2) {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 0xf << 4);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0), 0xf << 4, 3 << 4);
		/* disable the DQS2 and DQS3 tx dll to save power */
		clrbits_le32(PHY_REG(phy_base, 0x46), 1 << 3);
		clrbits_le32(PHY_REG(phy_base, 0x56), 1 << 3);
	}
	set_ds_odt(dram, sdram_params);
	/* deskew */
	setbits_le32(PHY_REG(phy_base, 2), 8);
	copy_to_reg(PHY_REG(phy_base, 0xb0),
		    &sdram_params->skew.a0_a1_skew[0], 15 * 4);
	copy_to_reg(PHY_REG(phy_base, 0x70),
		    &sdram_params->skew.cs0_dm0_skew[0], 44 * 4);
	copy_to_reg(PHY_REG(phy_base, 0xc0),
		    &sdram_params->skew.cs1_dm0_skew[0], 44 * 4);
}

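/*
 * A hedged note on the DesignWare uMCTL2 programming model this driver
 * appears to follow: RFSHCTL3 bit 1 (refresh_update_level) is a toggle;
 * flipping it tells the controller to latch the updated refresh
 * registers, which is why the XOR below is all that is needed.
 */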
static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

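/*
 * Read-gate training: the PHY trains each enabled DQS lane and reports
 * status in PHY register 0xff; bit 4 flags an error and the low nibble
 * carries per-lane done flags that are compared against the lane-enable
 * nibble in PHY register 0 (bits [7:4]). Returns 0 on success, -1 on
 * failure. ZQCS and auto-refresh are parked for the duration so that
 * neither interferes with the gate search.
 */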
static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
{
	u32 ret;
	u32 dis_auto_zq = 0;
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;

	/* disable zqcs */
	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
	      (1ul << 31))) {
		dis_auto_zq = 1;
		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
	}
	/* disable auto refresh */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
	update_refresh_reg(dram);

	if (dramtype == DDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0);
		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0);
	}
	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0xff));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	/* restore zqcs */
	if (dis_auto_zq)
		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
	/* restore auto refresh */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
	update_refresh_reg(dram);

	if (dramtype == DDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x29), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x39), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x49), 0x3, 0x2);
		clrsetbits_le32(PHY_REG(phy_base, 0x59), 0x3, 0x2);
	}

	if (ret & 0x10) {
		ret = -1;
	} else {
		ret = (ret & 0xf) ^ (readl(PHY_REG(phy_base, 0)) >> 4);
		ret = (ret == 0) ? 0 : -1;
	}
	return ret;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 * rank = 3: cs0 & cs1
 * note: take care to preserve the original MR value
 */
static int write_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 arg,
		    u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;

	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;
	if (dramtype == DDR3 || dramtype == DDR4) {
		writel((mr_num << 12) | (rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel(arg, pctl_base + DDR_PCTL2_MRCTRL1);
	} else {
		writel((rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel((mr_num << 8) | (arg & 0xff),
		       pctl_base + DDR_PCTL2_MRCTRL1);
	}

	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & MR_WR_BUSY)
		continue;

	return 0;
}

/*
 * rank: 1: cs0, 2: cs1, 3: cs0 & cs1
 * vrefrate: in units of 0.01%, e.g. 4500 means 45.00%
 */
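/*
 * A hedged sketch of the DDR4 MR6 VrefDQ encoding assumed below (JEDEC
 * VrefDQ training mode): bit 7 enters calibration mode, bit 6 selects
 * range 2 (45.00%..77.50%) versus range 1 (60.00%..92.50%), and the low
 * bits step in 0.65% increments, hence the /65 against a 6000 or 4500
 * base. Example: vrefrate = 5670 -> range 2, (5670 - 4500) / 65 = 18.
 */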
static int write_vrefdq(struct dram_info *dram, u32 rank, u32 vrefrate,
			u32 dramtype)
{
	u32 tccd_l, value;
	u32 dis_auto_zq = 0;
	void __iomem *pctl_base = dram->pctl;

	if (dramtype != DDR4 || vrefrate < 4500 || vrefrate > 9200)
		return -1;

	tccd_l = (readl(pctl_base + DDR_PCTL2_DRAMTMG4) >> 16) & 0xf;
	tccd_l = (tccd_l - 4) << 10;

	if (vrefrate > 7500) {
		/* range 1 */
		value = ((vrefrate - 6000) / 65) | tccd_l;
	} else {
		/* range 2 */
		value = ((vrefrate - 4500) / 65) | tccd_l | (1 << 6);
	}

	/* disable zqcs */
	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
	      (1ul << 31))) {
		dis_auto_zq = 1;
		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
	}
	/* disable auto refresh */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
	update_refresh_reg(dram);

	/* enable vrefdq calibration */
	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvrefdqe */
	/* write vrefdq value */
	write_mr(dram, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvref_time */
	write_mr(dram, rank, 6, value | (0 << 7), dramtype);
	udelay(1);/* tvrefdqx */

	/* restore zqcs */
	if (dis_auto_zq)
		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
	/* restore auto refresh */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
	update_refresh_reg(dram);

	return 0;
}

#define _MAX_(x, y) ((x) > (y) ? (x) : (y))

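/*
 * After gate training, PHY registers 0xfb..0xfe appear to hold the
 * per-lane read gate delays; the read deskew window is widened to
 * (max gate delay / 8) + 1, clamped to 0x1f, and split across the bit
 * fields in registers 0x6e/0x6f. This reading of the register layout is
 * inferred from the code below, not taken from a datasheet.
 */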
static void rx_deskew_switch_adjust(struct dram_info *dram)
{
	u32 i, deskew_val;
	u32 gate_val = 0;
	void __iomem *phy_base = dram->phy;

	for (i = 0; i < 4; i++)
		gate_val = _MAX_(readl(PHY_REG(phy_base, 0xfb + i)), gate_val);

	deskew_val = (gate_val >> 3) + 1;
	deskew_val = (deskew_val > 0x1f) ? 0x1f : deskew_val;
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xc, (deskew_val & 0x3) << 2);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x7 << 4,
			(deskew_val & 0x1c) << 2);
}

#undef _MAX_

static void tx_deskew_switch_adjust(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0x3, 1);
}

static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig, &dram->msch->ddrconf);
}

static void dram_all_config(struct dram_info *dram,
			    struct rk3328_sdram_params *sdram_params)
{
	u32 sys_reg = 0, tmp = 0;

	set_ddrconfig(dram, sdram_ch.ddrconfig);

	sys_reg |= SYS_REG_ENC_DDRTYPE(sdram_params->dramtype);
	sys_reg |= SYS_REG_ENC_ROW_3_4(sdram_ch.row_3_4, 0);
	sys_reg |= SYS_REG_ENC_RANK(sdram_ch.rank, 0);
	sys_reg |= SYS_REG_ENC_COL(sdram_ch.col, 0);
	sys_reg |= SYS_REG_ENC_BK(sdram_ch.bk, 0);
	SYS_REG_ENC_CS0_ROW(sdram_ch.cs0_row, sys_reg, tmp, 0);
	if (sdram_ch.cs1_row)
		SYS_REG_ENC_CS1_ROW(sdram_ch.cs1_row, sys_reg, tmp, 0);
	sys_reg |= SYS_REG_ENC_BW(sdram_ch.bw, 0);
	sys_reg |= SYS_REG_ENC_DBW(sdram_ch.dbw, 0);

	writel(sys_reg, &dram->grf->os_reg[2]);

	writel(sdram_ch.noc_timings.ddrtiming.d32, &dram->msch->ddrtiming);

	writel(sdram_ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
	writel(sdram_ch.noc_timings.readlatency, &dram->msch->readlatency);

	writel(sdram_ch.noc_timings.activate.d32, &dram->msch->activate);
	writel(sdram_ch.noc_timings.devtodev.d32, &dram->msch->devtodev);
	writel(sdram_ch.noc_timings.ddr4timing.d32, &dram->msch->ddr4_timing);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging0);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging1);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging2);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging3);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging4);
	writel(sdram_ch.noc_timings.agingx0, &dram->msch->aging5);
}

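/*
 * PWRCTL bit assignments assumed below, per the usual DesignWare uMCTL2
 * layout: bit 0 = selfref_en, bit 1 = powerdown_en, bit 3 =
 * en_dfi_dram_clk_disable. SR_IDLE/PD_IDLE (programmed into PWRTMG in
 * pctl_cfg()) give the idle timeouts; a zero timeout keeps the
 * corresponding low-power state disabled.
 */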
static void enable_low_power(struct dram_info *dram,
			     struct rk3328_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;

	/* enable upctl2 axi clock auto gating */
	writel(0x00800000, &dram->ddr_grf->ddr_grf_con[0]);
	writel(0x20012001, &dram->ddr_grf->ddr_grf_con[2]);
	/* enable upctl2 core clock auto gating */
	writel(0x001e001a, &dram->ddr_grf->ddr_grf_con[2]);
	/* enable sr, pd */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}

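/*
 * Bring-up order, as implemented below: hold everything in reset while
 * the DPLL is programmed, release the PHY reset so the controller gets
 * a clock, soft-reset the PHY, release the controller presetn and
 * program it, let dfi_init_start initialize the PHY once the controller
 * srstn deasserts, and finally run read-gate training.
 */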
static int sdram_init(struct dram_info *dram,
		      struct rk3328_sdram_params *sdram_params, u32 pre_init)
{
	void __iomem *pctl_base = dram->pctl;

	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * De-assert the DDR PHY psrstn before configuring the PLL;
	 * when the PHY PLL is used, psrstn must be de-asserted before
	 * the PLL is configured.
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);
	if (pre_init == 0) {
		switch (sdram_params->dramtype) {
		case DDR3:
			printf("DDR3\n");
			break;
		case DDR4:
			printf("DDR4\n");
			break;
		case LPDDR3:
		default:
			printf("LPDDR3\n");
			break;
		}
	}
	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram, sdram_params);
	sdram_ch.ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram, sdram_params);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	/* do ddr gate training */
	if (data_training(dram, 0, sdram_params->dramtype) != 0) {
		printf("data training error\n");
		return -1;
	}

	if (sdram_params->dramtype == DDR4)
		write_vrefdq(dram, 0x3, 5670, sdram_params->dramtype);

	if (pre_init == 0) {
		rx_deskew_switch_adjust(dram);
		tx_deskew_switch_adjust(dram);
	}

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}

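/*
 * Geometry probing works by address aliasing: write 0 to the DRAM base,
 * write PATTERN at the address that has exactly one candidate address
 * bit set, and read both back. If the pattern survives and the base is
 * still 0, that address bit really exists; otherwise the write wrapped
 * around. The capacity then follows from the detected geometry, e.g.
 * (illustrative) cs = 0, row = 15, bk = 3, col = 10, bw = 2 gives
 * cap = 1 << (0 + 15 + 3 + 10 + 2) = 1 GiB.
 */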
static u64 dram_detect_cap(struct dram_info *dram,
			   struct rk3328_sdram_params *sdram_params,
			   unsigned char channel)
{
	void __iomem *pctl_base = dram->pctl;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 row, rowtmp, row_3_4;
	void __iomem *test_addr, *test_addr1;
	u32 dbw;
	u32 cs;
	u32 bw = 1;
	u64 cap = 0;
	u32 dram_type = sdram_params->dramtype;
	u32 pwrctl;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		rowtmp = 16;

		for (col = coltmp; col >= 9; col -= 1) {
			writel(0, SDRAM_ADDR);
			test_addr = (void __iomem *)(SDRAM_ADDR +
					(1ul << (col + bw - 1ul)));
			writel(PATTERN, test_addr);
			if ((readl(test_addr) == PATTERN) &&
			    (readl(SDRAM_ADDR) == 0))
				break;
		}
		if (col == 8) {
			printf("col error\n");
			goto cap_err;
		}

		test_addr = (void __iomem *)(SDRAM_ADDR +
				(1ul << (coltmp + bktmp + bw - 1ul)));
		writel(0, SDRAM_ADDR);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(SDRAM_ADDR) == 0))
			bk = 3;
		else
			bk = 2;
		if (dram_type == LPDDR3)
			dbw = 2;
		else
			dbw = 1;
	} else {
		/* detect bg for ddr4 */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		test_addr = (void __iomem *)(SDRAM_ADDR +
				(1ul << (coltmp + bw + 1ul)));
		writel(0, SDRAM_ADDR);
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(SDRAM_ADDR) == 0))
			dbw = 0;
		else
			dbw = 1;
	}
	/* detect row */
	for (row = rowtmp; row > 12; row--) {
		writel(0, SDRAM_ADDR);
		test_addr = (void __iomem *)(SDRAM_ADDR +
				(1ul << (row + bktmp + coltmp + bw - 1ul)));
		writel(PATTERN, test_addr);
		if ((readl(test_addr) == PATTERN) &&
		    (readl(SDRAM_ADDR) == 0))
			break;
	}
	if (row == 12) {
		printf("row error\n");
		goto cap_err;
	}
	/* detect row_3_4 */
	test_addr = SDRAM_ADDR;
	test_addr1 = (void __iomem *)(SDRAM_ADDR +
			(0x3ul << (row + bktmp + coltmp + bw - 1ul - 1ul)));

	writel(0, test_addr);
	writel(PATTERN, test_addr1);
	if ((readl(test_addr) == 0) &&
	    (readl(test_addr1) == PATTERN))
		row_3_4 = 0;
	else
		row_3_4 = 1;

	/* disable auto low-power */
	pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
	writel(0, pctl_base + DDR_PCTL2_PWRCTL);

	/* bw and cs detect using phy read gate training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;

	bw = 2;

	/* restore auto low-power */
	writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);

	sdram_ch.rank = cs + 1;
	sdram_ch.col = col;
	sdram_ch.bk = bk;
	sdram_ch.dbw = dbw;
	sdram_ch.bw = bw;
	sdram_ch.cs0_row = row;
	if (cs)
		sdram_ch.cs1_row = row;
	else
		sdram_ch.cs1_row = 0;
	sdram_ch.row_3_4 = row_3_4;

	if (dram_type == DDR4)
		cap = 1llu << (cs + row + bk + col + ((dbw == 0) ? 2 : 1) + bw);
	else
		cap = 1llu << (cs + row + bk + col + bw);

	return cap;

cap_err:
	return 0;
}

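/*
 * Patch the MSTR entry of the register table with the probed geometry.
 * Field positions follow the usual uMCTL2 MSTR layout (an assumption,
 * but consistent with the masks below): bits [31:30] device config
 * (die width), bits [25:24] active ranks, bits [13:12] data bus width
 * (00 = full, 01 = half).
 */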
static u32 remodify_sdram_params(struct rk3328_sdram_params *sdram_params)
{
	u32 tmp = 0, tmp_adr = 0, i;

	for (i = 0; sdram_params->pctl_regs.pctl[i][0] != 0xFFFFFFFF; i++) {
		if (sdram_params->pctl_regs.pctl[i][0] == 0) {
			tmp = sdram_params->pctl_regs.pctl[i][1];/* MSTR */
			tmp_adr = i;
		}
	}

	tmp &= ~((3ul << 30) | (3ul << 24) | (3ul << 12));

	switch (sdram_ch.dbw) {
	case 2:
		tmp |= (3ul << 30);
		break;
	case 1:
		tmp |= (2ul << 30);
		break;
	case 0:
	default:
		tmp |= (1ul << 30);
		break;
	}

	if (sdram_ch.rank == 2)
		tmp |= 3 << 24;
	else
		tmp |= 1 << 24;

	tmp |= (2 - sdram_ch.bw) << 12;

	sdram_params->pctl_regs.pctl[tmp_adr][1] = tmp;

	if (sdram_ch.bw == 2)
		sdram_ch.noc_timings.ddrtiming.b.bwratio = 0;
	else
		sdram_ch.noc_timings.ddrtiming.b.bwratio = 1;

	return 0;
}

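/*
 * cs1 row detection reuses the aliasing trick within the cs1 address
 * window: the base of cs1 is 1 << (addrmap[ddrconf][0] + 8), and rows
 * are probed downward from cs0_row until a pattern write sticks. The
 * cs1_bit fixup below appears to compensate for how the row bits shift
 * once the cs decode bit sits below bit 20 in the address map; this
 * reading is inferred from the code, not from documentation.
 */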
static int dram_detect_cs1_row(struct rk3328_sdram_params *sdram_params,
			       unsigned char channel)
{
	u32 ret = 0;
	u32 cs1_bit;
	void __iomem *test_addr, *cs1_addr;
	u32 row, bktmp, coltmp, bw;
	u32 ddrconf = sdram_ch.ddrconfig;

	if (sdram_ch.rank == 2) {
		cs1_bit = addrmap[ddrconf][0] + 8;

		if (cs1_bit > 31)
			goto out;

		cs1_addr = (void __iomem *)(1ul << cs1_bit);
		if (cs1_bit < 20)
			cs1_bit = 1;
		else
			cs1_bit = 0;

		if (sdram_params->dramtype == DDR4) {
			if (sdram_ch.dbw == 0)
				bktmp = sdram_ch.bk + 2;
			else
				bktmp = sdram_ch.bk + 1;
		} else {
			bktmp = sdram_ch.bk;
		}
		bw = sdram_ch.bw;
		coltmp = sdram_ch.col;

		/* detect cs1 row */
		for (row = sdram_ch.cs0_row; row > 12; row--) {
			test_addr = (void __iomem *)(SDRAM_ADDR + cs1_addr +
					(1ul << (row + cs1_bit + bktmp +
						 coltmp + bw - 1ul)));
			writel(0, SDRAM_ADDR + cs1_addr);
			writel(PATTERN, test_addr);
			if ((readl(test_addr) == PATTERN) &&
			    (readl(SDRAM_ADDR + cs1_addr) == 0)) {
				ret = row;
				break;
			}
		}
	}

out:
	return ret;
}

static int sdram_init_detect(struct dram_info *dram,
			     struct rk3328_sdram_params *sdram_params)
{
	debug("Starting SDRAM initialization...\n");

	memcpy(&sdram_ch, &sdram_params->ch,
	       sizeof(struct rk3328_sdram_channel));

	sdram_init(dram, sdram_params, 1);
	dram_detect_cap(dram, sdram_params, 0);

	/* modify bw, cs related timing */
	remodify_sdram_params(sdram_params);
	/* reinit sdram with the real dram capacity */
	sdram_init(dram, sdram_params, 0);

	/* redetect cs1 row */
	sdram_ch.cs1_row =
		dram_detect_cs1_row(sdram_params, 0);

	return 0;
}

static int rk3328_dmc_init(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	int ret;

#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk3328_sdram_params *params = &plat->sdram_params;
#else
	struct dtd_rockchip_rk3328_dmc *dtplat = &plat->dtplat;
	struct rk3328_sdram_params *params =
		(void *)dtplat->rockchip_sdram_params;

	ret = conv_of_platdata(dev);
	if (ret)
		return ret;
#endif
	priv->phy = regmap_get_range(plat->map, 0);
	priv->pctl = regmap_get_range(plat->map, 1);
	priv->grf = regmap_get_range(plat->map, 2);
	priv->cru = regmap_get_range(plat->map, 3);
	priv->msch = regmap_get_range(plat->map, 4);
	priv->ddr_grf = regmap_get_range(plat->map, 5);

	debug("%s phy %p pctrl %p grf %p cru %p msch %p ddr_grf %p\n",
	      __func__, priv->phy, priv->pctl, priv->grf, priv->cru,
	      priv->msch, priv->ddr_grf);
	ret = sdram_init_detect(priv, params);
	if (ret < 0) {
		printf("%s: DRAM init failed %d\n", __func__, ret);
		return ret;
	}

	return 0;
}

static int rk3328_dmc_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rockchip_dmc_plat *plat = dev_get_platdata(dev);
	int ret;

	ret = dev_read_u32_array(dev, "rockchip,sdram-params",
				 (u32 *)&plat->sdram_params,
				 sizeof(plat->sdram_params) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,sdram-params %d\n",
		       __func__, ret);
		return ret;
	}
	ret = regmap_init_mem(dev, &plat->map);
	if (ret)
		printf("%s: regmap failed %d\n", __func__, ret);
#endif
	return 0;
}

#endif

static int rk3328_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_TPL_BUILD
	if (rk3328_dmc_init(dev))
		return 0;
#else
	struct dram_info *priv = dev_get_priv(dev);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	debug("%s: grf=%p\n", __func__, priv->grf);
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = rockchip_sdram_size(
				(phys_addr_t)&priv->grf->os_reg[2]);
#endif
	return 0;
}

static int rk3328_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk3328_dmc_ops = {
	.get_info = rk3328_dmc_get_info,
};

static const struct udevice_id rk3328_dmc_ids[] = {
	{ .compatible = "rockchip,rk3328-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk3328) = {
	.name = "rockchip_rk3328_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3328_dmc_ids,
	.ops = &rk3328_dmc_ops,
#ifdef CONFIG_TPL_BUILD
	.ofdata_to_platdata = rk3328_dmc_ofdata_to_platdata,
#endif
	.probe = rk3328_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_TPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rockchip_dmc_plat),
#endif
};