// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * (C) Copyright 2015 Google, Inc
 * Copyright 2014 Rockchip Inc.
 *
 * Adapted from coreboot.
 */

#include <common.h>
#include <clk.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <hang.h>
#include <init.h>
#include <log.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru.h>
#include <asm/arch-rockchip/ddr_rk3288.h>
#include <asm/arch-rockchip/grf_rk3288.h>
#include <asm/arch-rockchip/pmu_rk3288.h>
#include <asm/arch-rockchip/sdram.h>
#include <asm/arch-rockchip/sdram_rk3288.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <power/regulator.h>
#include <power/rk8xx_pmic.h>

struct chan_info {
	struct rk3288_ddr_pctl *pctl;
	struct rk3288_ddr_publ *publ;
	struct rk3288_msch *msch;
};

struct dram_info {
	struct chan_info chan[2];
	struct ram_info info;
	struct clk ddr_clk;
	struct rockchip_cru *cru;
	struct rk3288_grf *grf;
	struct rk3288_sgrf *sgrf;
	struct rk3288_pmu *pmu;
	bool is_veyron;
};

struct rk3288_sdram_params {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3288_dmc of_plat;
#endif
	struct rk3288_sdram_channel ch[2];
	struct rk3288_sdram_pctl_timing pctl_timing;
	struct rk3288_sdram_phy_timing phy_timing;
	struct rk3288_base_params base;
	int num_channels;
	struct regmap *map;
};

const int ddrconf_table[] = {
	/* row	col,bw */
	0,
	((1 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((4 << DDRCONF_ROW_SHIFT) | 1 << DDRCONF_COL_SHIFT),
	((1 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 2 << DDRCONF_COL_SHIFT),
	((1 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	((2 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	((3 << DDRCONF_ROW_SHIFT) | 0 << DDRCONF_COL_SHIFT),
	0,
	0,
	0,
	0,
	((4 << 4) | 2),
};

#define TEST_PATTEN	0x5aa5f00f
#define DQS_GATE_TRAINING_ERROR_RANK0	(1 << 4)
#define DQS_GATE_TRAINING_ERROR_RANK1	(2 << 4)

#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
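/*
 * Copy n bytes' worth of u32 values from src into the consecutive
 * registers starting at dest.
 */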
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

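/*
 * Assert (1) or release (0) the soft resets of one channel's DDR
 * controller (ctl) and PHY (phy) via CRU softrst_con[10].
 */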
static void ddr_reset(struct rockchip_cru *cru, u32 ch, u32 ctl, u32 phy)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;
	u32 ctl_psrstn_shift = 3 + 5 * ch;
	u32 ctl_srstn_shift = 2 + 5 * ch;
	u32 phy_psrstn_shift = 1 + 5 * ch;
	u32 phy_srstn_shift = 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift | 1 << ctl_psrstn_shift |
		     1 << ctl_srstn_shift | 1 << phy_psrstn_shift |
		     1 << phy_srstn_shift,
		     phy << phy_ctl_srstn_shift | ctl << ctl_psrstn_shift |
		     ctl << ctl_srstn_shift | phy << phy_psrstn_shift |
		     phy << phy_srstn_shift);
}

static void ddr_phy_ctl_reset(struct rockchip_cru *cru, u32 ch, u32 n)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift, n << phy_ctl_srstn_shift);
}

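/*
 * Fully reset the PHY and protocol controller of a channel, cycling the
 * AC and DATX8 DLL soft resets so the DLLs restart from a known state.
 */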
static void phy_pctrl_reset(struct rockchip_cru *cru,
			    struct rk3288_ddr_publ *publ,
			    int channel)
{
	int i;

	ddr_reset(cru, channel, 1, 1);
	udelay(1);
	clrbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		clrbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	udelay(10);
	setbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		setbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	udelay(10);
	ddr_reset(cru, channel, 1, 0);
	udelay(10);
	ddr_reset(cru, channel, 0, 0);
	udelay(10);
}

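/*
 * Configure the PHY DLLs for the target frequency: at 250MHz and below the
 * AC and DATX8 DLLs are disabled and the DLL bypass bit in PIR is set;
 * above 250MHz the DLLs are enabled and the bypass is cleared.
 */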
static void phy_dll_bypass_set(struct rk3288_ddr_publ *publ,
			       u32 freq)
{
	int i;

	if (freq <= 250000000) {
		if (freq <= 150000000)
			clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		else
			setbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		setbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxdllcr,
				     DXDLLCR_DLLDIS);

		setbits_le32(&publ->pir, PIR_DLLBYP);
	} else {
		clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		clrbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++) {
			clrbits_le32(&publ->datx8[i].dxdllcr,
				     DXDLLCR_DLLDIS);
		}

		clrbits_le32(&publ->pir, PIR_DLLBYP);
	}
}

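/*
 * Set up the DFI interface between the protocol controller and the PHY:
 * status/low-power behaviour, handshake timings and write ODT control.
 */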
static void dfi_cfg(struct rk3288_ddr_pctl *pctl, u32 dramtype)
{
	writel(DFI_INIT_START, &pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN,
	       &pctl->dfistcfg1);
	writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
	writel(7 << TLP_RESP_TIME_SHIFT | LP_SR_EN | LP_PD_EN,
	       &pctl->dfilpcfg0);

	writel(2 << TCTRL_DELAY_TIME_SHIFT, &pctl->dfitctrldelay);
	writel(1 << TPHY_WRDATA_TIME_SHIFT, &pctl->dfitphywrdata);
	writel(0xf << TPHY_RDLAT_TIME_SHIFT, &pctl->dfitphyrdlat);
	writel(2 << TDRAM_CLK_DIS_TIME_SHIFT, &pctl->dfitdramclkdis);
	writel(2 << TDRAM_CLK_EN_TIME_SHIFT, &pctl->dfitdramclken);
	writel(1, &pctl->dfitphyupdtype0);

	/* cs0 and cs1 write odt enable */
	writel((RANK0_ODT_WRITE_SEL | RANK1_ODT_WRITE_SEL),
	       &pctl->dfiodtcfg);
	/* odt write length */
	writel(7 << ODT_LEN_BL8_W_SHIFT, &pctl->dfiodtcfg1);
	/* phyupd and ctrlupd disabled */
	writel(0, &pctl->dfiupdcfg);
}

static void ddr_set_enable(struct rk3288_grf *grf, uint channel, bool enable)
{
	uint val = 0;

	if (enable) {
		val = 1 << (channel ? DDR1_16BIT_EN_SHIFT :
			    DDR0_16BIT_EN_SHIFT);
	}
	rk_clrsetreg(&grf->soc_con0,
		     1 << (channel ? DDR1_16BIT_EN_SHIFT : DDR0_16BIT_EN_SHIFT),
		     val);
}

static void ddr_set_ddr3_mode(struct rk3288_grf *grf, uint channel,
			      bool ddr3_mode)
{
	uint mask, val;

	mask = 1 << (channel ? MSCH1_MAINDDR3_SHIFT : MSCH0_MAINDDR3_SHIFT);
	val = ddr3_mode << (channel ? MSCH1_MAINDDR3_SHIFT :
			    MSCH0_MAINDDR3_SHIFT);
	rk_clrsetreg(&grf->soc_con0, mask, val);
}

static void ddr_set_en_bst_odt(struct rk3288_grf *grf, uint channel,
			       bool enable, bool enable_bst, bool enable_odt)
{
	uint mask;
	bool disable_bst = !enable_bst;

	mask = channel ?
		(1 << LPDDR3_EN1_SHIFT | 1 << UPCTL1_BST_DIABLE_SHIFT |
		 1 << UPCTL1_LPDDR3_ODT_EN_SHIFT) :
		(1 << LPDDR3_EN0_SHIFT | 1 << UPCTL0_BST_DIABLE_SHIFT |
		 1 << UPCTL0_LPDDR3_ODT_EN_SHIFT);
	rk_clrsetreg(&grf->soc_con2, mask,
		     enable << (channel ? LPDDR3_EN1_SHIFT : LPDDR3_EN0_SHIFT) |
		     disable_bst << (channel ? UPCTL1_BST_DIABLE_SHIFT :
				     UPCTL0_BST_DIABLE_SHIFT) |
		     enable_odt << (channel ? UPCTL1_LPDDR3_ODT_EN_SHIFT :
				    UPCTL0_LPDDR3_ODT_EN_SHIFT));
}

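/*
 * Program the protocol controller: load the timing set, configure MCFG for
 * the DRAM type (LPDDR3 or DDR3) and set the matching mode bits in the GRF.
 */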
static void pctl_cfg(int channel, struct rk3288_ddr_pctl *pctl,
		     struct rk3288_sdram_params *sdram_params,
		     struct rk3288_grf *grf)
{
	unsigned int burstlen;

	burstlen = (sdram_params->base.noc_timing >> 18) & 0x7;
	copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
		    sizeof(sdram_params->pctl_timing));
	switch (sdram_params->base.dramtype) {
	case LPDDR3:
		writel(sdram_params->pctl_timing.tcl - 1,
		       &pctl->dfitrddataen);
		writel(sdram_params->pctl_timing.tcwl,
		       &pctl->dfitphywrlat);
		burstlen >>= 1;
		writel(LPDDR2_S4 | 0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
		       LPDDR2_EN | burstlen << BURSTLENGTH_SHIFT |
		       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		ddr_set_ddr3_mode(grf, channel, false);
		ddr_set_enable(grf, channel, true);
		ddr_set_en_bst_odt(grf, channel, true, false,
				   sdram_params->base.odt);
		break;
	case DDR3:
		if (sdram_params->phy_timing.mr[1] & DDR3_DLL_DISABLE) {
			writel(sdram_params->pctl_timing.tcl - 3,
			       &pctl->dfitrddataen);
		} else {
			writel(sdram_params->pctl_timing.tcl - 2,
			       &pctl->dfitrddataen);
		}
		writel(sdram_params->pctl_timing.tcwl - 1,
		       &pctl->dfitphywrlat);
		writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
		       DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		ddr_set_ddr3_mode(grf, channel, true);
		ddr_set_enable(grf, channel, true);

		ddr_set_en_bst_odt(grf, channel, false, true, false);
		break;
	}

	setbits_le32(&pctl->scfg, 1);
}

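/*
 * Program the PUBL PHY timing registers and the MSCH NoC timing registers,
 * select the DRAM mode (DDR3 vs LPDDR2/3) in the PHY and enable or disable
 * dynamic RTT according to the ODT setting.
 */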
static void phy_cfg(const struct chan_info *chan, int channel,
		    struct rk3288_sdram_params *sdram_params)
{
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;
	uint ddr_freq_mhz = sdram_params->base.ddr_freq / 1000000;
	u32 dinit2, tmp;
	int i;

	dinit2 = DIV_ROUND_UP(ddr_freq_mhz * 200000, 1000);
	/* DDR PHY Timing */
	copy_to_reg(&publ->dtpr[0], &sdram_params->phy_timing.dtpr0,
		    sizeof(sdram_params->phy_timing));
	writel(sdram_params->base.noc_timing, &msch->ddrtiming);
	writel(0x3f, &msch->readlatency);
	writel(sdram_params->base.noc_activate, &msch->activate);
	writel(2 << BUSWRTORD_SHIFT | 2 << BUSRDTOWR_SHIFT |
	       1 << BUSRDTORD_SHIFT, &msch->devtodev);
	writel(DIV_ROUND_UP(ddr_freq_mhz * 5120, 1000) << PRT_DLLLOCK_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 50, 1000) << PRT_DLLSRST_SHIFT |
	       8 << PRT_ITMSRST_SHIFT, &publ->ptr[0]);
	writel(DIV_ROUND_UP(ddr_freq_mhz * 500000, 1000) << PRT_DINIT0_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 400, 1000) << PRT_DINIT1_SHIFT,
	       &publ->ptr[1]);
	writel(min(dinit2, 0x1ffffU) << PRT_DINIT2_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 1000, 1000) << PRT_DINIT3_SHIFT,
	       &publ->ptr[2]);

	switch (sdram_params->base.dramtype) {
	case LPDDR3:
		clrsetbits_le32(&publ->pgcr, 0x1F,
				0 << PGCR_DFTLMT_SHIFT |
				0 << PGCR_DFTCMP_SHIFT |
				1 << PGCR_DQSCFG_SHIFT |
				0 << PGCR_ITMDMD_SHIFT);
		/* DDRMODE select LPDDR3 */
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_LPDDR2_LPDDR3 << DDRMD_SHIFT);
		clrsetbits_le32(&publ->dxccr,
				DQSNRES_MASK << DQSNRES_SHIFT |
				DQSRES_MASK << DQSRES_SHIFT,
				4 << DQSRES_SHIFT | 0xc << DQSNRES_SHIFT);
		tmp = readl(&publ->dtpr[1]);
		tmp = ((tmp >> TDQSCKMAX_SHIFT) & TDQSCKMAX_MASK) -
		      ((tmp >> TDQSCK_SHIFT) & TDQSCK_MASK);
		clrsetbits_le32(&publ->dsgcr,
				DQSGE_MASK << DQSGE_SHIFT |
				DQSGX_MASK << DQSGX_SHIFT,
				tmp << DQSGE_SHIFT | tmp << DQSGX_SHIFT);
		break;
	case DDR3:
		clrbits_le32(&publ->pgcr, 0x1f);
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_DDR3 << DDRMD_SHIFT);
		break;
	}
	if (sdram_params->base.odt) {
		/* dynamic RTT enable */
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
	} else {
		/* dynamic RTT disable */
		for (i = 0; i < 4; i++)
			clrbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
	}
}

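/*
 * Trigger PHY initialisation (DLL reset/lock, impedance calibration, ITM
 * reset) and poll PGSR until init, DLL lock and ZQ calibration are done.
 */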
static void phy_init(struct rk3288_ddr_publ *publ)
{
	setbits_le32(&publ->pir, PIR_INIT | PIR_DLLSRST
		| PIR_DLLLOCK | PIR_ZCAL | PIR_ITMSRST | PIR_CLRSR);
	udelay(1);
	while ((readl(&publ->pgsr) &
		(PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE)) !=
		(PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE))
		;
}

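/*
 * Issue a command through the controller's MCMD register and wait for the
 * START_CMD bit to clear, indicating the command has been accepted.
 */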
static void send_command(struct rk3288_ddr_pctl *pctl, u32 rank,
			 u32 cmd, u32 arg)
{
	writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
	udelay(1);
	while (readl(&pctl->mcmd) & START_CMD)
		;
}

static inline void send_command_op(struct rk3288_ddr_pctl *pctl,
				   u32 rank, u32 cmd, u32 ma, u32 op)
{
	send_command(pctl, rank, cmd, (ma & LPDDR2_MA_MASK) << LPDDR2_MA_SHIFT |
		     (op & LPDDR2_OP_MASK) << LPDDR2_OP_SHIFT);
}

static void memory_init(struct rk3288_ddr_publ *publ,
			u32 dramtype)
{
	setbits_le32(&publ->pir,
		     (PIR_INIT | PIR_DRAMINIT | PIR_LOCKBYP
		      | PIR_ZCALBYP | PIR_CLRSR | PIR_ICPC
		      | (dramtype == DDR3 ? PIR_DRAMRST : 0)));
	udelay(1);
	while ((readl(&publ->pgsr) & (PGSR_IDONE | PGSR_DLDONE))
		!= (PGSR_IDONE | PGSR_DLDONE))
		;
}

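/*
 * Drive the controller state machine into the CONFIG state, waking it up
 * from low-power first if necessary.
 */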
static void move_to_config_state(struct rk3288_ddr_publ *publ,
				 struct rk3288_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MSK;

		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK)
			       != ACCESS)
				;
			/* wait DLL lock */
			while ((readl(&publ->pgsr) & PGSR_DLDONE)
			       != PGSR_DLDONE)
				;
			/*
			 * If we are in the low-power state we must wake up
			 * first and then enter the config state, so there
			 * is no break here.
			 */
		case ACCESS:
			/* no break */
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
				;
			break;
		case CONFIG:
			return;
		default:
			break;
		}
	}
}

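/*
 * Select the channel bus width: n == 1 selects half (16-bit) width and
 * disables the upper data bytes (DATX8 2/3) and their DLLs; any other
 * value selects full 32-bit width and re-enables and resets those DLLs.
 */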
static void set_bandwidth_ratio(const struct chan_info *chan, int channel,
				u32 n, struct rk3288_grf *grf)
{
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;

	if (n == 1) {
		setbits_le32(&pctl->ppcfg, 1);
		rk_setreg(&grf->soc_con0, 1 << (8 + channel));
		setbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data Byte disable */
		clrbits_le32(&publ->datx8[2].dxgcr, 1);
		clrbits_le32(&publ->datx8[3].dxgcr, 1);
		/* disable DLL */
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
	} else {
		clrbits_le32(&pctl->ppcfg, 1);
		rk_clrreg(&grf->soc_con0, 1 << (8 + channel));
		clrbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data Byte enable */
		setbits_le32(&publ->datx8[2].dxgcr, 1);
		setbits_le32(&publ->datx8[3].dxgcr, 1);

		/* enable DLL */
		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
		/* reset DLL */
		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
		udelay(10);
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
	}
	setbits_le32(&pctl->dfistcfg0, 1 << 2);
}

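/*
 * Run DQS gate training (QSTRN) and read valid training (RVTRN) with
 * auto-refresh disabled, then issue catch-up refreshes. Returns 0 on
 * success or -1 if the PHY reports a training error.
 */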
static int data_training(const struct chan_info *chan, int channel,
			 struct rk3288_sdram_params *sdram_params)
{
	unsigned int j;
	int ret = 0;
	u32 rank;
	int i;
	u32 step[2] = { PIR_QSTRN, PIR_RVTRN };
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;

	/* disable auto refresh */
	writel(0, &pctl->trefi);

	if (sdram_params->base.dramtype != LPDDR3)
		setbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);
	rank = sdram_params->ch[channel].rank | 1;
	for (j = 0; j < ARRAY_SIZE(step); j++) {
		/*
		 * trigger QSTRN and RVTRN
		 * clear DTDONE status
		 */
		setbits_le32(&publ->pir, PIR_CLRSR);

		/* trigger DTT */
		setbits_le32(&publ->pir,
			     PIR_INIT | step[j] | PIR_LOCKBYP | PIR_ZCALBYP |
			     PIR_CLRSR);
		udelay(1);
		/* wait echo byte DTDONE */
		while ((readl(&publ->datx8[0].dxgsr[0]) & rank)
			!= rank)
			;
		while ((readl(&publ->datx8[1].dxgsr[0]) & rank)
			!= rank)
			;
		if (!(readl(&pctl->ppcfg) & 1)) {
			while ((readl(&publ->datx8[2].dxgsr[0])
				& rank) != rank)
				;
			while ((readl(&publ->datx8[3].dxgsr[0])
				& rank) != rank)
				;
		}
		if (readl(&publ->pgsr) &
		    (PGSR_DTERR | PGSR_RVERR | PGSR_RVEIRR)) {
			ret = -1;
			break;
		}
	}
	/* send some auto refreshes to make up for those lost during training */
	for (i = 0; i < (rank > 1 ? 8 : 4); i++)
		send_command(pctl, rank, REF_CMD, 0);

	if (sdram_params->base.dramtype != LPDDR3)
		clrbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);

	/* resume auto refresh */
	writel(sdram_params->pctl_timing.trefi, &pctl->trefi);

	return ret;
}

static void move_to_access_state(const struct chan_info *chan)
{
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MSK;

		switch (state) {
		case LOW_POWER:
			if (((readl(&pctl->stat) >> LP_TRIG_SHIFT) &
			     LP_TRIG_MASK) == 1)
				return;

			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS)
				;
			/* wait DLL lock */
			while ((readl(&publ->pgsr) & PGSR_DLDONE)
				!= PGSR_DLDONE)
				;
			break;
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
				;
		case CONFIG:
			writel(GO_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) == CONFIG)
				;
			break;
		case ACCESS:
			return;
		default:
			break;
		}
	}
}

static void dram_cfg_rbc(const struct chan_info *chan, u32 chnum,
			 struct rk3288_sdram_params *sdram_params)
{
	struct rk3288_ddr_publ *publ = chan->publ;

	if (sdram_params->ch[chnum].bk == 3)
		clrsetbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT,
				1 << PDQ_SHIFT);
	else
		clrbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT);

	writel(sdram_params->base.ddrconfig, &chan->msch->ddrconf);
}

static void dram_all_config(const struct dram_info *dram,
			    struct rk3288_sdram_params *sdram_params)
{
	unsigned int chan;
	u32 sys_reg = 0;

	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (sdram_params->num_channels - 1) << SYS_REG_NUM_CH_SHIFT;
	for (chan = 0; chan < sdram_params->num_channels; chan++) {
		const struct rk3288_sdram_channel *info =
			&sdram_params->ch[chan];

		sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(chan);
		sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(chan);
		sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(chan);
		sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(chan);
		sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(chan);
		sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(chan);
		sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(chan);
		sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(chan);
		sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(chan);

		dram_cfg_rbc(&dram->chan[chan], chan, sdram_params);
	}
	writel(sys_reg, &dram->pmu->sys_reg[2]);
	rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, sdram_params->base.stride);
}

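/*
 * Run data training with the optimistic two-rank, 32-bit settings chosen
 * by the caller; on failure, use the DQS gate training error flags to
 * derive the real rank count and bus width, then retrain with the
 * corrected settings.
 */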
static int sdram_rank_bw_detect(struct dram_info *dram, int channel,
				struct rk3288_sdram_params *sdram_params)
{
	int reg;
	int need_training = 0;
	const struct chan_info *chan = &dram->chan[channel];
	struct rk3288_ddr_publ *publ = chan->publ;

	if (data_training(chan, channel, sdram_params) < 0) {
		reg = readl(&publ->datx8[0].dxgsr[0]);
		/* Check the result for rank 0 */
		if ((channel == 0) && (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
			debug("data training failed!\n");
			return -EIO;
		} else if ((channel == 1) &&
			   (reg & DQS_GATE_TRAINING_ERROR_RANK0)) {
			sdram_params->num_channels = 1;
		}

		/* Check the result for rank 1 */
		if (reg & DQS_GATE_TRAINING_ERROR_RANK1) {
			sdram_params->ch[channel].rank = 1;
			clrsetbits_le32(&publ->pgcr, 0xF << 18,
					sdram_params->ch[channel].rank << 18);
			need_training = 1;
		}
		reg = readl(&publ->datx8[2].dxgsr[0]);
		if (reg & (1 << 4)) {
			sdram_params->ch[channel].bw = 1;
			set_bandwidth_ratio(chan, channel,
					    sdram_params->ch[channel].bw,
					    dram->grf);
			need_training = 1;
		}
	}
	/* Assume the die bit width is the same as the chip bit width */
	sdram_params->ch[channel].dbw = sdram_params->ch[channel].bw;

	if (need_training &&
	    (data_training(chan, channel, sdram_params) < 0)) {
		if (sdram_params->base.dramtype == LPDDR3) {
			ddr_phy_ctl_reset(dram->cru, channel, 1);
			udelay(10);
			ddr_phy_ctl_reset(dram->cru, channel, 0);
			udelay(10);
		}
		debug("2nd data training failed!");
		return -EIO;
	}

	return 0;
}

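/*
 * Probe the number of column and row address bits by writing a test
 * pattern at power-of-two offsets and checking whether it aliases back
 * onto the start of SDRAM.
 */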
static int sdram_col_row_detect(struct dram_info *dram, int channel,
				struct rk3288_sdram_params *sdram_params)
{
	int row, col;
	unsigned int addr;
	const struct chan_info *chan = &dram->chan[channel];
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	struct rk3288_ddr_publ *publ = chan->publ;
	int ret = 0;

	/* Detect col */
	for (col = 11; col >= 9; col--) {
		writel(0, CFG_SYS_SDRAM_BASE);
		addr = CFG_SYS_SDRAM_BASE +
			(1 << (col + sdram_params->ch[channel].bw - 1));
		writel(TEST_PATTEN, addr);
		if ((readl(addr) == TEST_PATTEN) &&
		    (readl(CFG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (col == 8) {
		printf("Col detect error\n");
		ret = -EINVAL;
		goto out;
	} else {
		sdram_params->ch[channel].col = col;
	}

	move_to_config_state(publ, pctl);
	writel(4, &chan->msch->ddrconf);
	move_to_access_state(chan);
	/* Detect row */
	for (row = 16; row >= 12; row--) {
		writel(0, CFG_SYS_SDRAM_BASE);
		addr = CFG_SYS_SDRAM_BASE + (1 << (row + 15 - 1));
		writel(TEST_PATTEN, addr);
		if ((readl(addr) == TEST_PATTEN) &&
		    (readl(CFG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row == 11) {
		printf("Row detect error\n");
		ret = -EINVAL;
	} else {
		sdram_params->ch[channel].cs1_row = row;
		sdram_params->ch[channel].row_3_4 = 0;
		debug("chn %d col %d, row %d\n", channel, col, row);
		sdram_params->ch[channel].cs0_row = row;
	}

out:
	return ret;
}

static int sdram_get_niu_config(struct rk3288_sdram_params *sdram_params)
{
	int i, tmp, size, ret = 0;

	tmp = sdram_params->ch[0].col - 9;
	tmp -= (sdram_params->ch[0].bw == 2) ? 0 : 1;
	tmp |= ((sdram_params->ch[0].cs0_row - 12) << 4);
	size = sizeof(ddrconf_table) / sizeof(ddrconf_table[0]);
	for (i = 0; i < size; i++)
		if (tmp == ddrconf_table[i])
			break;
	if (i >= size) {
		printf("niu config not found\n");
		ret = -EINVAL;
	} else {
		sdram_params->base.ddrconfig = i;
	}

	return ret;
}

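/*
 * Derive the total capacity (in MiB) from the detected geometry and pick
 * the matching SGRF stride setting for channel interleaving.
 */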
static int sdram_get_stride(struct rk3288_sdram_params *sdram_params)
{
	int stride = -1;
	int ret = 0;
	long cap = sdram_params->num_channels * (1u <<
			(sdram_params->ch[0].cs0_row +
			 sdram_params->ch[0].col +
			 (sdram_params->ch[0].rank - 1) +
			 sdram_params->ch[0].bw +
			 3 - 20));

	switch (cap) {
	case 512:
		stride = 0;
		break;
	case 1024:
		stride = 5;
		break;
	case 2048:
		stride = 9;
		break;
	case 4096:
		stride = 0xd;
		break;
	default:
		stride = -1;
		printf("could not find correct stride, cap error!\n");
		ret = -EINVAL;
		break;
	}
	sdram_params->base.stride = stride;

	return ret;
}

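/*
 * Bring up both SDRAM channels: set the DDR clock, reset and configure the
 * controller and PHY, initialise the DRAM, detect rank/width/col/row, then
 * apply the final NIU and stride configuration.
 */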
static int sdram_init(struct dram_info *dram,
		      struct rk3288_sdram_params *sdram_params)
{
	int channel;
	int zqcr;
	int ret;

	debug("%s start\n", __func__);
	if ((sdram_params->base.dramtype == DDR3 &&
	     sdram_params->base.ddr_freq > 800000000) ||
	    (sdram_params->base.dramtype == LPDDR3 &&
	     sdram_params->base.ddr_freq > 533000000)) {
		debug("SDRAM frequency is too high!");
		return -E2BIG;
	}

	debug("ddr clk dpll\n");
	ret = clk_set_rate(&dram->ddr_clk, sdram_params->base.ddr_freq);
	debug("ret=%d\n", ret);
	if (ret) {
		debug("Could not set DDR clock\n");
		return ret;
	}

	for (channel = 0; channel < 2; channel++) {
		const struct chan_info *chan = &dram->chan[channel];
		struct rk3288_ddr_pctl *pctl = chan->pctl;
		struct rk3288_ddr_publ *publ = chan->publ;

		/* map all the 4GB space to the current channel */
		if (channel)
			rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x17);
		else
			rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, 0x1a);
		phy_pctrl_reset(dram->cru, publ, channel);
		phy_dll_bypass_set(publ, sdram_params->base.ddr_freq);

		dfi_cfg(pctl, sdram_params->base.dramtype);

		pctl_cfg(channel, pctl, sdram_params, dram->grf);

		phy_cfg(chan, channel, sdram_params);

		phy_init(publ);

		writel(POWER_UP_START, &pctl->powctl);
		while (!(readl(&pctl->powstat) & POWER_UP_DONE))
			;

		memory_init(publ, sdram_params->base.dramtype);
		move_to_config_state(publ, pctl);

		if (sdram_params->base.dramtype == LPDDR3) {
			send_command(pctl, 3, DESELECT_CMD, 0);
			udelay(1);
			send_command(pctl, 3, PREA_CMD, 0);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 63, 0xfc);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 1,
					sdram_params->phy_timing.mr[1]);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 2,
					sdram_params->phy_timing.mr[2]);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 3,
					sdram_params->phy_timing.mr[3]);
			udelay(1);
		}

		/* Use a 32-bit bus width for detection */
		sdram_params->ch[channel].bw = 2;
		set_bandwidth_ratio(chan, channel,
				    sdram_params->ch[channel].bw, dram->grf);
		/*
		 * Set the chip selects; use n = 3 for detection:
		 * CS0, n = 1
		 * CS1, n = 2
		 * CS0 & CS1, n = 3
		 */
		sdram_params->ch[channel].rank = 2;
		clrsetbits_le32(&publ->pgcr, 0xF << 18,
				(sdram_params->ch[channel].rank | 1) << 18);

		/* DS=40ohm,ODT=155ohm */
		zqcr = 1 << ZDEN_SHIFT | 2 << PU_ONDIE_SHIFT |
		       2 << PD_ONDIE_SHIFT | 0x19 << PU_OUTPUT_SHIFT |
		       0x19 << PD_OUTPUT_SHIFT;
		writel(zqcr, &publ->zq1cr[0]);
		writel(zqcr, &publ->zq0cr[0]);

		if (sdram_params->base.dramtype == LPDDR3) {
			/* LPDDR2/LPDDR3 must wait for DAI to complete, max 10us */
			udelay(10);
			send_command_op(pctl,
					sdram_params->ch[channel].rank | 1,
					MRS_CMD, 11,
					sdram_params->base.odt ? 3 : 0);
			if (channel == 0) {
				writel(0, &pctl->mrrcfg0);
				send_command_op(pctl, 1, MRR_CMD, 8, 0);
				/* S8 */
				if ((readl(&pctl->mrrstat0) & 0x3) != 3) {
					debug("failed!");
					return -EREMOTEIO;
				}
			}
		}

		/* Detect the rank and bit width using data training */
		sdram_rank_bw_detect(dram, channel, sdram_params);

		if (sdram_params->base.dramtype == LPDDR3) {
			u32 i;

			writel(0, &pctl->mrrcfg0);
			for (i = 0; i < 17; i++)
				send_command_op(pctl, 1, MRR_CMD, i, 0);
		}
		writel(15, &chan->msch->ddrconf);
		move_to_access_state(chan);
		/* DDR3 and LPDDR3 always have 8 banks, no need to detect */
		sdram_params->ch[channel].bk = 3;
		/* Detect col and row number */
		ret = sdram_col_row_detect(dram, channel, sdram_params);
		if (ret)
			goto error;
	}
	/* Find NIU DDR configuration */
	ret = sdram_get_niu_config(sdram_params);
	if (ret)
		goto error;
	/* Find stride setting */
	ret = sdram_get_stride(sdram_params);
	if (ret)
		goto error;

	dram_all_config(dram, sdram_params);
	debug("%s done\n", __func__);

	return 0;
error:
	printf("DRAM init failed!\n");
	hang();
}

# ifdef CONFIG_ROCKCHIP_FAST_SPL
static int veyron_init(struct dram_info *priv)
{
	struct udevice *pmic;
	int ret;

	ret = uclass_first_device_err(UCLASS_PMIC, &pmic);
	if (ret)
		return ret;

	/* Slowly raise to max CPU voltage to prevent overshoot */
	ret = rk8xx_spl_configure_buck(pmic, 1, 1200000);
	if (ret)
		return ret;
	udelay(175); /* Must wait for voltage to stabilize, 2mV/us */
	ret = rk8xx_spl_configure_buck(pmic, 1, 1400000);
	if (ret)
		return ret;
	udelay(100); /* Must wait for voltage to stabilize, 2mV/us */

	rk3288_clk_configure_cpu(priv->cru, priv->grf);

	return 0;
}
# endif

static int setup_sdram(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rk3288_sdram_params *params = dev_get_plat(dev);

# ifdef CONFIG_ROCKCHIP_FAST_SPL
	if (priv->is_veyron) {
		int ret;

		ret = veyron_init(priv);
		if (ret)
			return ret;
	}
# endif

	return sdram_init(priv, params);
}

static int rk3288_dmc_of_to_plat(struct udevice *dev)
{
	struct rk3288_sdram_params *params = dev_get_plat(dev);
	int ret;

	if (!CONFIG_IS_ENABLED(OF_REAL))
		return 0;

	/* RK3288 supports dual channel, so default the channel count to 2 */
	params->num_channels = 2;
	ret = dev_read_u32_array(dev, "rockchip,pctl-timing",
				 (u32 *)&params->pctl_timing,
				 sizeof(params->pctl_timing) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,pctl-timing\n", __func__);
		return -EINVAL;
	}
	ret = dev_read_u32_array(dev, "rockchip,phy-timing",
				 (u32 *)&params->phy_timing,
				 sizeof(params->phy_timing) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,phy-timing\n", __func__);
		return -EINVAL;
	}
	ret = dev_read_u32_array(dev, "rockchip,sdram-params",
				 (u32 *)&params->base,
				 sizeof(params->base) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,sdram-params\n", __func__);
		return -EINVAL;
	}
#ifdef CONFIG_ROCKCHIP_FAST_SPL
	struct dram_info *priv = dev_get_priv(dev);

	priv->is_veyron = !fdt_node_check_compatible(blob, 0, "google,veyron");
#endif
	ret = regmap_init_mem(dev_ofnode(dev), &params->map);
	if (ret)
		return ret;

	return 0;
}
#endif /* CONFIG_SPL_BUILD */

#if CONFIG_IS_ENABLED(OF_PLATDATA)
static int conv_of_plat(struct udevice *dev)
{
	struct rk3288_sdram_params *plat = dev_get_plat(dev);
	struct dtd_rockchip_rk3288_dmc *of_plat = &plat->of_plat;
	int ret;

	memcpy(&plat->pctl_timing, of_plat->rockchip_pctl_timing,
	       sizeof(plat->pctl_timing));
	memcpy(&plat->phy_timing, of_plat->rockchip_phy_timing,
	       sizeof(plat->phy_timing));
	memcpy(&plat->base, of_plat->rockchip_sdram_params, sizeof(plat->base));
	/* RK3288 supports dual channel, so default the channel count to 2 */
	plat->num_channels = 2;
	ret = regmap_init_mem_plat(dev, of_plat->reg, sizeof(of_plat->reg[0]),
				   ARRAY_SIZE(of_plat->reg) / 2, &plat->map);
	if (ret)
		return ret;

	return 0;
}
#endif

static int rk3288_dmc_probe(struct udevice *dev)
{
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
	struct rk3288_sdram_params *plat = dev_get_plat(dev);
	struct udevice *dev_clk;
	struct regmap *map;
	int ret;
#endif
	struct dram_info *priv = dev_get_priv(dev);

	priv->pmu = syscon_get_first_range(ROCKCHIP_SYSCON_PMU);
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	ret = conv_of_plat(dev);
	if (ret)
		return ret;
#endif
	map = syscon_get_regmap_by_driver_data(ROCKCHIP_SYSCON_NOC);
	if (IS_ERR(map))
		return PTR_ERR(map);
	priv->chan[0].msch = regmap_get_range(map, 0);
	priv->chan[1].msch = (struct rk3288_msch *)
			(regmap_get_range(map, 0) + 0x80);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	priv->sgrf = syscon_get_first_range(ROCKCHIP_SYSCON_SGRF);

	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
	priv->chan[0].publ = regmap_get_range(plat->map, 1);
	priv->chan[1].pctl = regmap_get_range(plat->map, 2);
	priv->chan[1].publ = regmap_get_range(plat->map, 3);

	ret = rockchip_get_clk(&dev_clk);
	if (ret)
		return ret;
	priv->ddr_clk.id = CLK_DDR;
	ret = clk_request(dev_clk, &priv->ddr_clk);
	if (ret)
		return ret;

	priv->cru = rockchip_get_cru();
	if (IS_ERR(priv->cru))
		return PTR_ERR(priv->cru);
	ret = setup_sdram(dev);
	if (ret)
		return ret;
#else
	priv->info.base = CFG_SYS_SDRAM_BASE;
	priv->info.size = rockchip_sdram_size(
				(phys_addr_t)&priv->pmu->sys_reg[2]);
#endif

	return 0;
}

static int rk3288_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk3288_dmc_ops = {
	.get_info = rk3288_dmc_get_info,
};

static const struct udevice_id rk3288_dmc_ids[] = {
	{ .compatible = "rockchip,rk3288-dmc" },
	{ }
};

U_BOOT_DRIVER(rockchip_rk3288_dmc) = {
	.name = "rockchip_rk3288_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3288_dmc_ids,
	.ops = &rk3288_dmc_ops,
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
	.of_to_plat = rk3288_dmc_of_to_plat,
#endif
	.probe = rk3288_dmc_probe,
	.priv_auto = sizeof(struct dram_info),
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
	.plat_auto = sizeof(struct rk3288_sdram_params),
#endif
};