blob: 729255493af408cd76b4fd841f15dc25c1739166 [file] [log] [blame]
// SPDX-License-Identifier: GPL-2.0
2/*
3 * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
4 */
5
6#include <common.h>
7#include <debug_uart.h>
8#include <dm.h>
9#include <ram.h>
10#include <syscon.h>
11#include <asm/io.h>
12#include <asm/arch-rockchip/clock.h>
13#include <asm/arch-rockchip/cru_px30.h>
14#include <asm/arch-rockchip/grf_px30.h>
15#include <asm/arch-rockchip/hardware.h>
16#include <asm/arch-rockchip/sdram.h>
17#include <asm/arch-rockchip/sdram_px30.h>
18
struct dram_info {
#ifdef CONFIG_TPL_BUILD
	/* Register blocks used only while TPL performs the DRAM init */
	struct ddr_pctl_regs *pctl;		/* DDR protocol controller */
	struct ddr_phy_regs *phy;		/* DDR PHY */
	struct px30_cru *cru;			/* clock & reset unit */
	struct msch_regs *msch;			/* memory scheduler (NoC) */
	struct px30_ddr_grf_regs *ddr_grf;	/* DDR general register file */
	struct px30_grf *grf;			/* SoC general register file */
#endif
	struct ram_info info;		/* base/size reported to the RAM uclass */
	struct px30_pmugrf *pmugrf;	/* PMU GRF; os_reg[2]/[3] hold DRAM info */
};
31
32#ifdef CONFIG_TPL_BUILD
33
/* Row/bank/column geometry encoding for each ddrconfig index (non-DDR4) */
u8 ddr_cfg_2_rbc[] = {
	/*
	 * [6:4] max row: 13+n
	 * [3] bank(0:4bank,1:8bank)
	 * [2:0] col(10+n)
	 */
	((5 << 4) | (1 << 3) | 0), /* 0 */
	((5 << 4) | (1 << 3) | 1), /* 1 */
	((4 << 4) | (1 << 3) | 2), /* 2 */
	((3 << 4) | (1 << 3) | 3), /* 3 */
	((2 << 4) | (1 << 3) | 4), /* 4 */
	((5 << 4) | (0 << 3) | 2), /* 5 */
	((4 << 4) | (1 << 3) | 2), /* 6 */
	/*((0<<3)|3),*/ /* 12 for ddr4 */
	/*((1<<3)|1),*/ /* 13 B,C exchange for rkvdec */
};
50
/*
 * Maps a DDR4 upctl addrmap index (7..11) to the NoC ddrconf value.
 * For DDR4, when ddrconfig = 7 the upctl should be set to 7 while the
 * NoC is set to 1, which is more efficient:
 * noc ddrconf, upctl addrmap
 * 1		7
 * 2		8
 * 3		9
 * 12		10
 * 5		11
 */
u8 d4_rbc_2_d3_rbc[] = {
	1, /* 7 */
	2, /* 8 */
	3, /* 9 */
	12, /* 10 */
	5, /* 11 */
};
68
/*
 * ADDRMAP1..ADDRMAP8 register values, one set per ddrconfig index.
 * row higher than cs should be disabled by set to 0xf
 * rank addrmap calculate by real cap.
 */
u32 addrmap[][8] = {
	/* map0 map1, map2, map3, map4, map5
	 * map6, map7, map8
	 * -------------------------------------------------------
	 * bk2-0 col 5-2 col 9-6 col 11-10 row 11-0
	 * row 15-12 row 17-16 bg1,0
	 * -------------------------------------------------------
	 * 4,3,2 5-2 9-6 6
	 * 3,2
	 */
	{0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
	 0x05050505, 0x00000505, 0x3f3f}, /* 0 */
	{0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x06060606, 0x3f3f}, /* 1 */
	{0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 2 */
	{0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 3 */
	{0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000606, 0x3f3f}, /* 5 */
	{0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 6 */
	{0x003f0808, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000606, 0x0600}, /* 7 */
	{0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x0700}, /* 8 */
	{0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0801}, /* 9 */
	{0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f01}, /* 10 */
	{0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000606, 0x3f00}, /* 11 */
	/* when ddr4 12 map to 10, when ddr3 12 unused */
	{0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f01}, /* 12 (same mapping as 10) */
	{0x00070706, 0x00000000, 0x1f010000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000606, 0x3f3f}, /* 13 */
};
113
/* Fixed PX30 peripheral base addresses; TPL runs before DT is available */
#define PMUGRF_BASE_ADDR		0xFF010000
#define CRU_BASE_ADDR			0xFF2B0000
#define GRF_BASE_ADDR			0xFF140000
#define DDRC_BASE_ADDR			0xFF600000
#define DDR_PHY_BASE_ADDR		0xFF2A0000
#define SERVER_MSCH0_BASE_ADDR		0xFF530000
#define DDR_GRF_BASE_ADDR		0xff630000

/* Single global instance used by the TPL init path (no driver model) */
struct dram_info dram_info;

/* Default parameter set: DDR3 auto-detect profile at 333 MHz */
struct px30_sdram_params sdram_configs[] = {
#include "sdram-px30-ddr3-detect-333.inc"
};

/* PHY de-skew table included from a generated .inc file */
struct ddr_phy_skew skew = {
#include "sdram-px30-ddr_skew.inc"
};
131
/*
 * Drive the DDR controller/PHY soft resets via CRU softrst_con[1]/[2].
 * Callers pass 1 to assert and 0 to de-assert each reset (see the
 * staged de-assert sequence in sdram_init_()).  The controller AXI
 * reset follows the controller core reset (ctl_srstn).
 */
static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(upctl2_srstn_req(ctl_srstn) | upctl2_psrstn_req(ctl_psrstn) |
	       upctl2_asrstn_req(ctl_srstn),
	       &dram->cru->softrst_con[1]);
	writel(ddrphy_srstn_req(phy_srstn) | ddrphy_psrstn_req(phy_psrstn),
	       &dram->cru->softrst_con[2]);
}
142
/*
 * Program the DPLL (cru->pll[1]) to @hz.
 *
 * Post-dividers are chosen per frequency band and fbdiv is derived from
 * a 24 MHz reference (the /24 below).  The DPLL is parked on the
 * crystal while being re-programmed, then switched back to the PLL
 * output.  Lock is polled for at most ~1000 us; on timeout we proceed
 * anyway — there is no error path this early in boot.
 */
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;

	refdiv = 1;
	if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else if (mhz <= 600) {
		postdiv1 = 4;
		postdiv2 = 1;
	} else if (mhz <= 800) {
		postdiv1 = 3;
		postdiv2 = 1;
	} else if (mhz <= 1600) {
		postdiv1 = 2;
		postdiv2 = 1;
	} else {
		postdiv1 = 1;
		postdiv2 = 1;
	}
	/* output = (24 MHz * fbdiv / refdiv) / (postdiv1 * postdiv2) */
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	/* run from the crystal while the PLL settings change */
	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	writel(DSMPD(1) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	/* bounded wait for the PLL lock bit */
	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}
186
/* Set the DPLL to drive the DDR clock for the requested data rate */
static void rkclk_configure_ddr(struct dram_info *dram,
				struct px30_sdram_params *sdram_params)
{
	/* for inno ddr phy need 2*freq */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHz * 2);
}
193
194/* return ddrconfig value
195 * (-1), find ddrconfig fail
196 * other, the ddrconfig value
197 * only support cs0_row >= cs1_row
198 */
199static unsigned int calculate_ddrconfig(struct px30_sdram_params *sdram_params)
200{
201 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
202 u32 bw, die_bw, col, bank;
203 u32 i, tmp;
204 u32 ddrconf = -1;
205
206 bw = cap_info->bw;
207 die_bw = cap_info->dbw;
208 col = cap_info->col;
209 bank = cap_info->bk;
210
211 if (sdram_params->base.dramtype == DDR4) {
212 if (die_bw == 0)
213 ddrconf = 7 + bw;
214 else
215 ddrconf = 12 - bw;
216 ddrconf = d4_rbc_2_d3_rbc[ddrconf - 7];
217 } else {
218 tmp = ((bank - 2) << 3) | (col + bw - 10);
219 for (i = 0; i < 7; i++)
220 if ((ddr_cfg_2_rbc[i] & 0xf) == tmp) {
221 ddrconf = i;
222 break;
223 }
224 if (i > 6)
225 printascii("calculate ddrconfig error\n");
226 }
227
228 return ddrconf;
229}
230
/*
 * calculate controller dram address map, and setting to register.
 * argument sdram_params->ch.ddrconf must be right value before
 * call this function.
 */
static void set_ctl_address_map(struct dram_info *dram,
				struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 cs_pst, bg, max_row, ddrconf;
	u32 i;

	if (sdram_params->base.dramtype == DDR4)
		/*
		 * DDR4 8bit dram BG = 2(4bank groups),
		 * 16bit dram BG = 1 (2 bank groups)
		 */
		bg = (cap_info->dbw == 0) ? 2 : 1;
	else
		bg = 0;

	/* bit position of the rank (chip-select) address line */
	cs_pst = cap_info->bw + cap_info->col +
		bg + cap_info->bk + cap_info->cs0_row;
	if (cs_pst >= 32 || cap_info->rank == 1)
		/* single rank or cs bit out of range: disable cs mapping */
		writel(0x1f, pctl_base + DDR_PCTL2_ADDRMAP0);
	else
		writel(cs_pst - 8, pctl_base + DDR_PCTL2_ADDRMAP0);

	/* for DDR4, map the d3-equivalent config back to its 7..11 index */
	ddrconf = cap_info->ddrconfig;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (d4_rbc_2_d3_rbc[i] == ddrconf) {
				ddrconf = 7 + i;
				break;
			}
		}
	}

	/* ADDRMAP1..ADDRMAP8 come straight from the addrmap[] table */
	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP1),
			  &addrmap[ddrconf][0], 8 * 4);
	max_row = cs_pst - 1 - 8 - (addrmap[ddrconf][5] & 0xf);

	if (max_row < 12)
		printascii("set addrmap fail\n");
	/* need to disable row ahead of rank by set to 0xf */
	for (i = 17; i > max_row; i--)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
				((i - 12) * 8 / 32) * 4,
				0xf << ((i - 12) * 8 % 32),
				0xf << ((i - 12) * 8 % 32));

	if ((sdram_params->base.dramtype == LPDDR3 ||
	     sdram_params->base.dramtype == LPDDR2) &&
	    cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw != 0x2)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);
}
290
/*
 * Read mode register @mr_num via the controller; the result is fetched
 * from DDR GRF status(0), low byte only.
 *
 * rank = 1: cs0
 * rank = 2: cs1
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num)
{
	void __iomem *ddr_grf_base = dram->ddr_grf;

	pctl_read_mr(dram->pctl, rank, mr_num);

	return (readl(ddr_grf_base + DDR_GRF_STATUS(0)) & 0xff);
}
303
304#define MIN(a, b) (((a) > (b)) ? (b) : (a))
305#define MAX(a, b) (((a) > (b)) ? (a) : (b))
306static u32 check_rd_gate(struct dram_info *dram)
307{
308 void __iomem *phy_base = dram->phy;
309
310 u32 max_val = 0;
311 u32 min_val = 0xff;
312 u32 gate[4];
313 u32 i, bw;
314
315 bw = (readl(PHY_REG(phy_base, 0x0)) >> 4) & 0xf;
316 switch (bw) {
317 case 0x1:
318 bw = 1;
319 break;
320 case 0x3:
321 bw = 2;
322 break;
323 case 0xf:
324 default:
325 bw = 4;
326 break;
327 }
328
329 for (i = 0; i < bw; i++) {
330 gate[i] = readl(PHY_REG(phy_base, 0xfb + i));
331 max_val = MAX(max_val, gate[i]);
332 min_val = MIN(min_val, gate[i]);
333 }
334
335 if (max_val > 0x80 || min_val < 0x20)
336 return -1;
337 else
338 return 0;
339}
340
341static int data_training(struct dram_info *dram, u32 cs, u32 dramtype)
342{
343 void __iomem *pctl_base = dram->pctl;
344 u32 dis_auto_zq = 0;
345 u32 pwrctl;
346 u32 ret;
347
348 /* disable auto low-power */
349 pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
350 writel(0, pctl_base + DDR_PCTL2_PWRCTL);
351
352 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
353
354 ret = phy_data_training(dram->phy, cs, dramtype);
355
356 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
357
358 /* restore auto low-power */
359 writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
360
361 return ret;
362}
363
/* Thin wrapper: program the DRAM bus-width setting into the PHY */
static void dram_set_bw(struct dram_info *dram, u32 bw)
{
	phy_dram_set_bw(dram->phy, bw);
}
368
/* Write the ddrconfig into the scheduler (value in both byte lanes) */
static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
{
	writel(ddrconfig | (ddrconfig << 8), &dram->msch->deviceconf);
	/* clear GRF soc_noc_con[1] bits 15:14 (write-mask in top half) */
	rk_clrsetreg(&dram->grf->soc_noc_con[1], 0x3 << 14, 0 << 14);
}
374
/*
 * Program the memory scheduler: per-rank capacity, then the NoC timing
 * set from the parameter blob.
 */
static void sdram_msch_config(struct msch_regs *msch,
			      struct sdram_msch_timings *noc_timings,
			      struct sdram_cap_info *cap_info,
			      struct sdram_base_params *base)
{
	u64 cs_cap[2];

	cs_cap[0] = sdram_get_cs_cap(cap_info, 0, base->dramtype);
	cs_cap[1] = sdram_get_cs_cap(cap_info, 1, base->dramtype);
	/* devicesize: cs1 size in [15:8], cs0 size in [7:0], 64 MB units */
	writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
	       (((cs_cap[0] >> 20) / 64) & 0xff),
	       &msch->devicesize);

	writel(noc_timings->ddrtiminga0.d32,
	       &msch->ddrtiminga0);
	writel(noc_timings->ddrtimingb0.d32,
	       &msch->ddrtimingb0);
	writel(noc_timings->ddrtimingc0.d32,
	       &msch->ddrtimingc0);
	writel(noc_timings->devtodev0.d32,
	       &msch->devtodev0);
	writel(noc_timings->ddrmode.d32, &msch->ddrmode);
	writel(noc_timings->ddr4timing.d32,
	       &msch->ddr4timing);
	/* all aging registers deliberately share the agingx0 value */
	writel(noc_timings->agingx0, &msch->agingx0);
	writel(noc_timings->agingx0, &msch->aging0);
	writel(noc_timings->agingx0, &msch->aging1);
	writel(noc_timings->agingx0, &msch->aging2);
	writel(noc_timings->agingx0, &msch->aging3);
}
405
/*
 * Final system-level config: ddrconfig into the NoC, detected geometry
 * encoded into pmugrf os_reg[2]/[3] (later decoded to compute the RAM
 * size — see px30_dmc_probe()), and scheduler timings.
 */
static void dram_all_config(struct dram_info *dram,
			    struct px30_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 sys_reg2 = 0;
	u32 sys_reg3 = 0;

	set_ddrconfig(dram, cap_info->ddrconfig);
	sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
			 &sys_reg3, 0);
	writel(sys_reg2, &dram->pmugrf->os_reg[2]);
	writel(sys_reg3, &dram->pmugrf->os_reg[3]);
	sdram_msch_config(dram->msch, &sdram_params->ch.noc_timings, cap_info,
			  &sdram_params->base);
}
421
/*
 * Configure automatic clock gating and self-refresh / power-down idle
 * entry for the controller and PHY.
 */
static void enable_low_power(struct dram_info *dram,
			     struct px30_sdram_params *sdram_params)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	void __iomem *ddr_grf_base = dram->ddr_grf;
	u32 grf_lp_con;

	/*
	 * bit0: grf_upctl_axi_cg_en = 1 enable upctl2 axi clk auto gating
	 * bit1: grf_upctl_apb_cg_en = 1 ungated axi,core clk for apb access
	 * bit2: grf_upctl_core_cg_en = 1 enable upctl2 core clk auto gating
	 * bit3: grf_selfref_type2_en = 0 disable core clk gating when type2 sr
	 * bit4: grf_upctl_syscreq_cg_en = 1
	 *       ungating coreclk when c_sysreq assert
	 * bit8-11: grf_auto_sr_dly = 6
	 */
	writel(0x1f1f0617, &dram->ddr_grf->ddr_grf_con[1]);

	/* dramtype-specific enable bit; write-mask in the upper half-word */
	if (sdram_params->base.dramtype == DDR4)
		grf_lp_con = (0x7 << 16) | (1 << 1);
	else if (sdram_params->base.dramtype == DDR3)
		grf_lp_con = (0x7 << 16) | (1 << 0);
	else
		grf_lp_con = (0x7 << 16) | (1 << 2);

	/* en lpckdis_en */
	grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
	writel(grf_lp_con, ddr_grf_base + DDR_GRF_LP_CON);

	/* off digit module clock when enter power down */
	setbits_le32(PHY_REG(phy_base, 7), 1 << 7);

	/* enable sr, pd: idle counts of 0 disable the respective mode */
	if (PD_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
	if (SR_IDLE == 0)
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	else
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
	/* NOTE(review): presumably dfi dram clk disable — confirm bit 3 */
	setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
}
466
/*
 * Full reset/clock/controller/PHY bring-up sequence.
 *
 * pre_init: 0: pre init for dram cap detect
 *	     1: detect correct cap(except cs1 row)info, than reinit
 *	     2: after reinit, we detect cs1_row, if cs1_row not equal
 *		to cs0_row and cs is in middle on ddrconf map, we need
 *		to reinit dram, than set the correct ddrconf.
 *
 * Return: 0 on success, -1 on data-training or mode-register failure.
 */
static int sdram_init_(struct dram_info *dram,
		       struct px30_sdram_params *sdram_params, u32 pre_init)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;

	/* hold everything in reset while the DPLL is (re)programmed */
	rkclk_ddr_reset(dram, 1, 1, 1, 1);
	udelay(10);
	/*
	 * dereset ddr phy psrstn to config pll,
	 * if using phy pll psrstn must be dereset
	 * before config pll
	 */
	rkclk_ddr_reset(dram, 1, 1, 1, 0);
	rkclk_configure_ddr(dram, sdram_params);

	/* release phy srst to provide clk to ctrl */
	rkclk_ddr_reset(dram, 1, 1, 0, 0);
	udelay(10);
	phy_soft_reset(dram->phy);
	/* release ctrl presetn, and config ctl registers */
	rkclk_ddr_reset(dram, 1, 0, 0, 0);
	pctl_cfg(dram->pctl, &sdram_params->pctl_regs, SR_IDLE, PD_IDLE);
	cap_info->ddrconfig = calculate_ddrconfig(sdram_params);
	set_ctl_address_map(dram, sdram_params);
	phy_cfg(dram->phy, &sdram_params->phy_regs, sdram_params->skew,
		&sdram_params->base, cap_info->bw);

	/* enable dfi_init_start to init phy after ctl srstn deassert */
	setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));

	rkclk_ddr_reset(dram, 0, 0, 0, 0);
	/* wait for dfi_init_done and dram init complete (no timeout here) */
	while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
		continue;

	if (sdram_params->base.dramtype == LPDDR3)
		pctl_write_mr(dram->pctl, 3, 11, 3, LPDDR3);

	/* do ddr gate training; retry while the gate values look bad */
redo_cs0_training:
	if (data_training(dram, 0, sdram_params->base.dramtype) != 0) {
		if (pre_init != 0)
			printascii("DTT cs0 error\n");
		return -1;
	}
	if (check_rd_gate(dram)) {
		printascii("re training cs0");
		goto redo_cs0_training;
	}

	/* NOTE(review): MR8 check — presumably LPDDR basic-config fields */
	if (sdram_params->base.dramtype == LPDDR3) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x3)
			return -1;
	} else if (sdram_params->base.dramtype == LPDDR2) {
		if ((read_mr(dram, 1, 8) & 0x3) != 0x0)
			return -1;
	}
	/* for px30: when 2cs, both 2 cs should be training */
	if (pre_init != 0 && cap_info->rank == 2) {
redo_cs1_training:
		if (data_training(dram, 1, sdram_params->base.dramtype) != 0) {
			printascii("DTT cs1 error\n");
			return -1;
		}
		if (check_rd_gate(dram)) {
			printascii("re training cs1");
			goto redo_cs1_training;
		}
	}

	if (sdram_params->base.dramtype == DDR4)
		pctl_write_vrefdq(dram->pctl, 0x3, 5670,
				  sdram_params->base.dramtype);

	dram_all_config(dram, sdram_params);
	enable_low_power(dram, sdram_params);

	return 0;
}
554
/*
 * Probe the attached DRAM geometry (col/bank/row/bus width/rank) by
 * trial accesses, filling sdram_params->ch.cap_info.
 *
 * @channel is currently unused.
 *
 * Return: 0 on success, -1 if column or row detection fails.
 */
static int dram_detect_cap(struct dram_info *dram,
			   struct px30_sdram_params *sdram_params,
			   unsigned char channel)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;

	/*
	 * for ddr3: ddrconf = 3
	 * for ddr4: ddrconf = 12
	 * for lpddr3: ddrconf = 3
	 * default bw = 1
	 */
	u32 bk, bktmp;
	u32 col, coltmp;
	u32 rowtmp;
	u32 cs;
	u32 bw = 1;
	u32 dram_type = sdram_params->base.dramtype;

	if (dram_type != DDR4) {
		/* detect col and bk for ddr3/lpddr3 */
		coltmp = 12;
		bktmp = 3;
		if (dram_type == LPDDR2)
			rowtmp = 15;
		else
			rowtmp = 16;

		if (sdram_detect_col(cap_info, coltmp) != 0)
			goto cap_err;
		sdram_detect_bank(cap_info, coltmp, bktmp);
		sdram_detect_dbw(cap_info, dram_type);
	} else {
		/* detect bg for ddr4; col/bk are set to fixed values here */
		coltmp = 10;
		bktmp = 4;
		rowtmp = 17;

		col = 10;
		bk = 2;
		cap_info->col = col;
		cap_info->bk = bk;
		sdram_detect_bg(cap_info, coltmp);
	}

	/* detect row */
	if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
		goto cap_err;

	/* detect row_3_4 */
	sdram_detect_row_3_4(cap_info, coltmp, bktmp);

	/* bw and cs detect using data training */
	if (data_training(dram, 1, dram_type) == 0)
		cs = 1;
	else
		cs = 0;
	cap_info->rank = cs + 1;

	dram_set_bw(dram, 2);
	if (data_training(dram, 0, dram_type) == 0)
		bw = 2;
	else
		bw = 1;
	cap_info->bw = bw;

	/* assume cs1 mirrors cs0 for now; cs1 row is redetected later */
	cap_info->cs0_high16bit_row = cap_info->cs0_row;
	if (cs) {
		cap_info->cs1_row = cap_info->cs0_row;
		cap_info->cs1_high16bit_row = cap_info->cs0_row;
	} else {
		cap_info->cs1_row = 0;
		cap_info->cs1_high16bit_row = 0;
	}

	return 0;
cap_err:
	return -1;
}
634
635/* return: 0 = success, other = fail */
636static int sdram_init_detect(struct dram_info *dram,
637 struct px30_sdram_params *sdram_params)
638{
639 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
640 u32 ret;
641 u32 sys_reg = 0;
642 u32 sys_reg3 = 0;
643
644 if (sdram_init_(dram, sdram_params, 0) != 0)
645 return -1;
646
647 if (dram_detect_cap(dram, sdram_params, 0) != 0)
648 return -1;
649
650 /* modify bw, cs related timing */
651 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
652 sdram_params->base.dramtype);
653 /* reinit sdram by real dram cap */
654 ret = sdram_init_(dram, sdram_params, 1);
655 if (ret != 0)
656 goto out;
657
658 /* redetect cs1 row */
659 sdram_detect_cs1_row(cap_info, sdram_params->base.dramtype);
660 if (cap_info->cs1_row) {
661 sys_reg = readl(&dram->pmugrf->os_reg[2]);
662 sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
663 SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
664 sys_reg, sys_reg3, 0);
665 writel(sys_reg, &dram->pmugrf->os_reg[2]);
666 writel(sys_reg3, &dram->pmugrf->os_reg[3]);
667 }
668
669 ret = sdram_detect_high_row(cap_info);
670
671out:
672 return ret;
673}
674
675struct px30_sdram_params
676 *get_default_sdram_config(void)
677{
678 sdram_configs[0].skew = &skew;
679
680 return &sdram_configs[0];
681}
682
683/* return: 0 = success, other = fail */
684int sdram_init(void)
685{
686 struct px30_sdram_params *sdram_params;
687 int ret = 0;
688
689 dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
690 dram_info.pctl = (void *)DDRC_BASE_ADDR;
691 dram_info.grf = (void *)GRF_BASE_ADDR;
692 dram_info.cru = (void *)CRU_BASE_ADDR;
693 dram_info.msch = (void *)SERVER_MSCH0_BASE_ADDR;
694 dram_info.ddr_grf = (void *)DDR_GRF_BASE_ADDR;
695 dram_info.pmugrf = (void *)PMUGRF_BASE_ADDR;
696
697 sdram_params = get_default_sdram_config();
698 ret = sdram_init_detect(&dram_info, sdram_params);
699
700 if (ret)
701 goto error;
702
703 sdram_print_ddr_info(&sdram_params->ch.cap_info, &sdram_params->base);
704
705 printascii("out\n");
706 return ret;
707error:
708 return (-1);
709}
710#else
711
/*
 * Driver-model probe (non-TPL builds): TPL already initialized DRAM and
 * stored the geometry in pmugrf os_reg[2]/[3]; decode it to report the
 * RAM size to the uclass.
 */
static int px30_dmc_probe(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
	debug("%s: grf=%p\n", __func__, priv->pmugrf);
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size =
		rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]);

	return 0;
}
724
/* RAM uclass get_info op: hand out the base/size captured at probe time */
static int px30_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}
733
static struct ram_ops px30_dmc_ops = {
	.get_info = px30_dmc_get_info,
};

/* Matches the PX30 memory controller node in the device tree */
static const struct udevice_id px30_dmc_ids[] = {
	{ .compatible = "rockchip,px30-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_px30) = {
	.name = "rockchip_px30_dmc",
	.id = UCLASS_RAM,
	.of_match = px30_dmc_ids,
	.ops = &px30_dmc_ops,
	.probe = px30_dmc_probe,
	/* driver model allocates priv automatically before probe */
	.priv_auto_alloc_size = sizeof(struct dram_info),
};
751#endif /* CONFIG_TPL_BUILD */