/*
 * (C) Copyright 2015 Google, Inc
 * Copyright 2014 Rockchip Inc.
 *
 * SPDX-License-Identifier:	GPL-2.0
 *
 * Adapted from coreboot.
 */

#include <common.h>
#include <clk.h>
#include <dm.h>
#include <errno.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/cru_rk3288.h>
#include <asm/arch/ddr_rk3288.h>
#include <asm/arch/grf_rk3288.h>
#include <asm/arch/pmu_rk3288.h>
#include <asm/arch/sdram.h>
#include <linux/err.h>
#include <power/regulator.h>
#include <power/rk808_pmic.h>

DECLARE_GLOBAL_DATA_PTR;

struct chan_info {
	struct rk3288_ddr_pctl *pctl;
	struct rk3288_ddr_publ *publ;
	struct rk3288_msch *msch;
};

struct dram_info {
	struct chan_info chan[2];
	struct ram_info info;
	struct clk ddr_clk;
	struct rk3288_cru *cru;
	struct rk3288_grf *grf;
	struct rk3288_sgrf *sgrf;
	struct rk3288_pmu *pmu;
	bool is_veyron;
};

struct rk3288_sdram_params {
	struct rk3288_sdram_channel ch[2];
	struct rk3288_sdram_pctl_timing pctl_timing;
	struct rk3288_sdram_phy_timing phy_timing;
	struct rk3288_base_params base;
	int num_channels;
	struct regmap *map;
};

#ifdef CONFIG_SPL_BUILD
/* Copy n bytes (a multiple of 4) of 32-bit values from src into the register block at dest */
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

static void ddr_reset(struct rk3288_cru *cru, u32 ch, u32 ctl, u32 phy)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;
	u32 ctl_psrstn_shift = 3 + 5 * ch;
	u32 ctl_srstn_shift = 2 + 5 * ch;
	u32 phy_psrstn_shift = 1 + 5 * ch;
	u32 phy_srstn_shift = 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift | 1 << ctl_psrstn_shift |
		     1 << ctl_srstn_shift | 1 << phy_psrstn_shift |
		     1 << phy_srstn_shift,
		     phy << phy_ctl_srstn_shift | ctl << ctl_psrstn_shift |
		     ctl << ctl_srstn_shift | phy << phy_psrstn_shift |
		     phy << phy_srstn_shift);
}

static void ddr_phy_ctl_reset(struct rk3288_cru *cru, u32 ch, u32 n)
{
	u32 phy_ctl_srstn_shift = 4 + 5 * ch;

	rk_clrsetreg(&cru->cru_softrst_con[10],
		     1 << phy_ctl_srstn_shift, n << phy_ctl_srstn_shift);
}

static void phy_pctrl_reset(struct rk3288_cru *cru,
			    struct rk3288_ddr_publ *publ,
			    u32 channel)
{
	int i;

	ddr_reset(cru, channel, 1, 1);
	udelay(1);
	clrbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		clrbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	udelay(10);
	setbits_le32(&publ->acdllcr, ACDLLCR_DLLSRST);
	for (i = 0; i < 4; i++)
		setbits_le32(&publ->datx8[i].dxdllcr, DXDLLCR_DLLSRST);

	udelay(10);
	ddr_reset(cru, channel, 1, 0);
	udelay(10);
	ddr_reset(cru, channel, 0, 0);
	udelay(10);
}

static void phy_dll_bypass_set(struct rk3288_ddr_publ *publ,
			       u32 freq)
{
	int i;

	if (freq <= 250000000) {
		if (freq <= 150000000)
			clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		else
			setbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		setbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxdllcr,
				     DXDLLCR_DLLDIS);

		setbits_le32(&publ->pir, PIR_DLLBYP);
	} else {
		clrbits_le32(&publ->dllgcr, SBIAS_BYPASS);
		clrbits_le32(&publ->acdllcr, ACDLLCR_DLLDIS);
		for (i = 0; i < 4; i++) {
			clrbits_le32(&publ->datx8[i].dxdllcr,
				     DXDLLCR_DLLDIS);
		}

		clrbits_le32(&publ->pir, PIR_DLLBYP);
	}
}

static void dfi_cfg(struct rk3288_ddr_pctl *pctl, u32 dramtype)
{
	writel(DFI_INIT_START, &pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN,
	       &pctl->dfistcfg1);
	writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
	writel(7 << TLP_RESP_TIME_SHIFT | LP_SR_EN | LP_PD_EN,
	       &pctl->dfilpcfg0);

	writel(2 << TCTRL_DELAY_TIME_SHIFT, &pctl->dfitctrldelay);
	writel(1 << TPHY_WRDATA_TIME_SHIFT, &pctl->dfitphywrdata);
	writel(0xf << TPHY_RDLAT_TIME_SHIFT, &pctl->dfitphyrdlat);
	writel(2 << TDRAM_CLK_DIS_TIME_SHIFT, &pctl->dfitdramclkdis);
	writel(2 << TDRAM_CLK_EN_TIME_SHIFT, &pctl->dfitdramclken);
	writel(1, &pctl->dfitphyupdtype0);

	/* cs0 and cs1 write odt enable */
	writel((RANK0_ODT_WRITE_SEL | RANK1_ODT_WRITE_SEL),
	       &pctl->dfiodtcfg);
	/* odt write length */
	writel(7 << ODT_LEN_BL8_W_SHIFT, &pctl->dfiodtcfg1);
	/* phyupd and ctrlupd disabled */
	writel(0, &pctl->dfiupdcfg);
}

static void ddr_set_enable(struct rk3288_grf *grf, uint channel, bool enable)
{
	uint val = 0;

	if (enable) {
		val = 1 << (channel ? DDR1_16BIT_EN_SHIFT :
			    DDR0_16BIT_EN_SHIFT);
	}
	rk_clrsetreg(&grf->soc_con0,
		     1 << (channel ? DDR1_16BIT_EN_SHIFT : DDR0_16BIT_EN_SHIFT),
		     val);
}

static void ddr_set_ddr3_mode(struct rk3288_grf *grf, uint channel,
			      bool ddr3_mode)
{
	uint mask, val;

	mask = 1 << (channel ? MSCH1_MAINDDR3_SHIFT : MSCH0_MAINDDR3_SHIFT);
	val = ddr3_mode << (channel ? MSCH1_MAINDDR3_SHIFT :
			    MSCH0_MAINDDR3_SHIFT);
	rk_clrsetreg(&grf->soc_con0, mask, val);
}

static void ddr_set_en_bst_odt(struct rk3288_grf *grf, uint channel,
			       bool enable, bool enable_bst, bool enable_odt)
{
	uint mask;
	bool disable_bst = !enable_bst;

	mask = channel ?
		(1 << LPDDR3_EN1_SHIFT | 1 << UPCTL1_BST_DIABLE_SHIFT |
		 1 << UPCTL1_LPDDR3_ODT_EN_SHIFT) :
		(1 << LPDDR3_EN0_SHIFT | 1 << UPCTL0_BST_DIABLE_SHIFT |
		 1 << UPCTL0_LPDDR3_ODT_EN_SHIFT);
	rk_clrsetreg(&grf->soc_con2, mask,
		     enable << (channel ? LPDDR3_EN1_SHIFT : LPDDR3_EN0_SHIFT) |
		     disable_bst << (channel ? UPCTL1_BST_DIABLE_SHIFT :
				     UPCTL0_BST_DIABLE_SHIFT) |
		     enable_odt << (channel ? UPCTL1_LPDDR3_ODT_EN_SHIFT :
				    UPCTL0_LPDDR3_ODT_EN_SHIFT));
}

static void pctl_cfg(u32 channel, struct rk3288_ddr_pctl *pctl,
		     const struct rk3288_sdram_params *sdram_params,
		     struct rk3288_grf *grf)
{
	unsigned int burstlen;

	burstlen = (sdram_params->base.noc_timing >> 18) & 0x7;
	copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
		    sizeof(sdram_params->pctl_timing));
	switch (sdram_params->base.dramtype) {
	case LPDDR3:
		writel(sdram_params->pctl_timing.tcl - 1,
		       &pctl->dfitrddataen);
		writel(sdram_params->pctl_timing.tcwl,
		       &pctl->dfitphywrlat);
		burstlen >>= 1;
		writel(LPDDR2_S4 | 0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
		       LPDDR2_EN | burstlen << BURSTLENGTH_SHIFT |
		       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		ddr_set_ddr3_mode(grf, channel, false);
		ddr_set_enable(grf, channel, true);
		ddr_set_en_bst_odt(grf, channel, true, false,
				   sdram_params->base.odt);
		break;
	case DDR3:
		if (sdram_params->phy_timing.mr[1] & DDR3_DLL_DISABLE) {
			writel(sdram_params->pctl_timing.tcl - 3,
			       &pctl->dfitrddataen);
		} else {
			writel(sdram_params->pctl_timing.tcl - 2,
			       &pctl->dfitrddataen);
		}
		writel(sdram_params->pctl_timing.tcwl - 1,
		       &pctl->dfitphywrlat);
		writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
		       DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		ddr_set_ddr3_mode(grf, channel, true);
		ddr_set_enable(grf, channel, true);

		ddr_set_en_bst_odt(grf, channel, false, true, false);
		break;
	}

	setbits_le32(&pctl->scfg, 1);
}

static void phy_cfg(const struct chan_info *chan, u32 channel,
		    const struct rk3288_sdram_params *sdram_params)
{
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;
	uint ddr_freq_mhz = sdram_params->base.ddr_freq / 1000000;
	u32 dinit2, tmp;
	int i;

	dinit2 = DIV_ROUND_UP(ddr_freq_mhz * 200000, 1000);
	/* DDR PHY Timing */
	copy_to_reg(&publ->dtpr[0], &sdram_params->phy_timing.dtpr0,
		    sizeof(sdram_params->phy_timing));
	writel(sdram_params->base.noc_timing, &msch->ddrtiming);
	writel(0x3f, &msch->readlatency);
	writel(sdram_params->base.noc_activate, &msch->activate);
	writel(2 << BUSWRTORD_SHIFT | 2 << BUSRDTOWR_SHIFT |
	       1 << BUSRDTORD_SHIFT, &msch->devtodev);
	writel(DIV_ROUND_UP(ddr_freq_mhz * 5120, 1000) << PRT_DLLLOCK_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 50, 1000) << PRT_DLLSRST_SHIFT |
	       8 << PRT_ITMSRST_SHIFT, &publ->ptr[0]);
	writel(DIV_ROUND_UP(ddr_freq_mhz * 500000, 1000) << PRT_DINIT0_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 400, 1000) << PRT_DINIT1_SHIFT,
	       &publ->ptr[1]);
	writel(min(dinit2, 0x1ffffU) << PRT_DINIT2_SHIFT |
	       DIV_ROUND_UP(ddr_freq_mhz * 1000, 1000) << PRT_DINIT3_SHIFT,
	       &publ->ptr[2]);

	switch (sdram_params->base.dramtype) {
	case LPDDR3:
		clrsetbits_le32(&publ->pgcr, 0x1F,
				0 << PGCR_DFTLMT_SHIFT |
				0 << PGCR_DFTCMP_SHIFT |
				1 << PGCR_DQSCFG_SHIFT |
				0 << PGCR_ITMDMD_SHIFT);
		/* DDRMODE select LPDDR3 */
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_LPDDR2_LPDDR3 << DDRMD_SHIFT);
		clrsetbits_le32(&publ->dxccr,
				DQSNRES_MASK << DQSNRES_SHIFT |
				DQSRES_MASK << DQSRES_SHIFT,
				4 << DQSRES_SHIFT | 0xc << DQSNRES_SHIFT);
		tmp = readl(&publ->dtpr[1]);
		tmp = ((tmp >> TDQSCKMAX_SHIFT) & TDQSCKMAX_MASK) -
		      ((tmp >> TDQSCK_SHIFT) & TDQSCK_MASK);
		clrsetbits_le32(&publ->dsgcr,
				DQSGE_MASK << DQSGE_SHIFT |
				DQSGX_MASK << DQSGX_SHIFT,
				tmp << DQSGE_SHIFT | tmp << DQSGX_SHIFT);
		break;
	case DDR3:
		clrbits_le32(&publ->pgcr, 0x1f);
		clrsetbits_le32(&publ->dcr, DDRMD_MASK << DDRMD_SHIFT,
				DDRMD_DDR3 << DDRMD_SHIFT);
		break;
	}
	if (sdram_params->base.odt) {
		/* dynamic RTT enable */
		for (i = 0; i < 4; i++)
			setbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
	} else {
		/* dynamic RTT disable */
		for (i = 0; i < 4; i++)
			clrbits_le32(&publ->datx8[i].dxgcr, DQSRTT | DQRTT);
	}
}

static void phy_init(struct rk3288_ddr_publ *publ)
{
	setbits_le32(&publ->pir, PIR_INIT | PIR_DLLSRST
		     | PIR_DLLLOCK | PIR_ZCAL | PIR_ITMSRST | PIR_CLRSR);
	udelay(1);
	while ((readl(&publ->pgsr) &
		(PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE)) !=
		(PGSR_IDONE | PGSR_DLDONE | PGSR_ZCDONE))
		;
}

static void send_command(struct rk3288_ddr_pctl *pctl, u32 rank,
			 u32 cmd, u32 arg)
{
	writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
	udelay(1);
	while (readl(&pctl->mcmd) & START_CMD)
		;
}

static inline void send_command_op(struct rk3288_ddr_pctl *pctl,
				   u32 rank, u32 cmd, u32 ma, u32 op)
{
	send_command(pctl, rank, cmd, (ma & LPDDR2_MA_MASK) << LPDDR2_MA_SHIFT |
		     (op & LPDDR2_OP_MASK) << LPDDR2_OP_SHIFT);
}

static void memory_init(struct rk3288_ddr_publ *publ,
			u32 dramtype)
{
	setbits_le32(&publ->pir,
		     (PIR_INIT | PIR_DRAMINIT | PIR_LOCKBYP
		      | PIR_ZCALBYP | PIR_CLRSR | PIR_ICPC
		      | (dramtype == DDR3 ? PIR_DRAMRST : 0)));
	udelay(1);
	while ((readl(&publ->pgsr) & (PGSR_IDONE | PGSR_DLDONE))
		!= (PGSR_IDONE | PGSR_DLDONE))
		;
}

static void move_to_config_state(struct rk3288_ddr_publ *publ,
				 struct rk3288_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MSK;

		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK)
				!= ACCESS)
				;
			/* wait for DLL lock */
			while ((readl(&publ->pgsr) & PGSR_DLDONE)
				!= PGSR_DLDONE)
				;
			/*
			 * If in the low-power state we must wake up first
			 * and then enter the config state, so fall through
			 * here (no break).
			 */
		case ACCESS:
			/* no break */
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
				;
			break;
		case CONFIG:
			return;
		default:
			break;
		}
	}
}

static void set_bandwidth_ratio(const struct chan_info *chan, u32 channel,
				u32 n, struct rk3288_grf *grf)
{
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_msch *msch = chan->msch;

	if (n == 1) {
		setbits_le32(&pctl->ppcfg, 1);
		rk_setreg(&grf->soc_con0, 1 << (8 + channel));
		setbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data Byte disable */
		clrbits_le32(&publ->datx8[2].dxgcr, 1);
		clrbits_le32(&publ->datx8[3].dxgcr, 1);
		/* disable DLL */
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
	} else {
		clrbits_le32(&pctl->ppcfg, 1);
		rk_clrreg(&grf->soc_con0, 1 << (8 + channel));
		clrbits_le32(&msch->ddrtiming, 1 << 31);
		/* Data Byte enable */
		setbits_le32(&publ->datx8[2].dxgcr, 1);
		setbits_le32(&publ->datx8[3].dxgcr, 1);

		/* enable DLL */
		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLDIS);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLDIS);
		/* reset DLL */
		clrbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		clrbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
		udelay(10);
		setbits_le32(&publ->datx8[2].dxdllcr, DXDLLCR_DLLSRST);
		setbits_le32(&publ->datx8[3].dxdllcr, DXDLLCR_DLLSRST);
	}
	setbits_le32(&pctl->dfistcfg0, 1 << 2);
}

static int data_training(const struct chan_info *chan, u32 channel,
			 const struct rk3288_sdram_params *sdram_params)
{
	unsigned int j;
	int ret = 0;
	u32 rank;
	int i;
	u32 step[2] = { PIR_QSTRN, PIR_RVTRN };
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;

	/* disable auto refresh */
	writel(0, &pctl->trefi);

	if (sdram_params->base.dramtype != LPDDR3)
		setbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);
	rank = sdram_params->ch[channel].rank | 1;
	for (j = 0; j < ARRAY_SIZE(step); j++) {
		/*
		 * trigger QSTRN and RVTRN
		 * clear DTDONE status
		 */
		setbits_le32(&publ->pir, PIR_CLRSR);

		/* trigger DTT */
		setbits_le32(&publ->pir,
			     PIR_INIT | step[j] | PIR_LOCKBYP | PIR_ZCALBYP |
			     PIR_CLRSR);
		udelay(1);
		/* wait for the echo byte DTDONE */
		while ((readl(&publ->datx8[0].dxgsr[0]) & rank)
			!= rank)
			;
		while ((readl(&publ->datx8[1].dxgsr[0]) & rank)
			!= rank)
			;
		if (!(readl(&pctl->ppcfg) & 1)) {
			while ((readl(&publ->datx8[2].dxgsr[0])
				& rank) != rank)
				;
			while ((readl(&publ->datx8[3].dxgsr[0])
				& rank) != rank)
				;
		}
		if (readl(&publ->pgsr) &
		    (PGSR_DTERR | PGSR_RVERR | PGSR_RVEIRR)) {
			ret = -1;
			break;
		}
	}
	/* send some auto refreshes to make up for those lost during DTT */
	for (i = 0; i < (rank > 1 ? 8 : 4); i++)
		send_command(pctl, rank, REF_CMD, 0);

	if (sdram_params->base.dramtype != LPDDR3)
		clrbits_le32(&publ->pgcr, 1 << PGCR_DQSCFG_SHIFT);

	/* resume auto refresh */
	writel(sdram_params->pctl_timing.trefi, &pctl->trefi);

	return ret;
}

static void move_to_access_state(const struct chan_info *chan)
{
	struct rk3288_ddr_publ *publ = chan->publ;
	struct rk3288_ddr_pctl *pctl = chan->pctl;
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MSK;

		switch (state) {
		case LOW_POWER:
			if (((readl(&pctl->stat) >> LP_TRIG_SHIFT) &
			     LP_TRIG_MASK) == 1)
				return;

			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != ACCESS)
				;
			/* wait for DLL lock */
			while ((readl(&publ->pgsr) & PGSR_DLDONE)
				!= PGSR_DLDONE)
				;
			break;
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) != CONFIG)
				;
			/* fall through */
		case CONFIG:
			writel(GO_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MSK) == CONFIG)
				;
			break;
		case ACCESS:
			return;
		default:
			break;
		}
	}
}

static void dram_cfg_rbc(const struct chan_info *chan, u32 chnum,
			 const struct rk3288_sdram_params *sdram_params)
{
	struct rk3288_ddr_publ *publ = chan->publ;

	if (sdram_params->ch[chnum].bk == 3)
		clrsetbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT,
				1 << PDQ_SHIFT);
	else
		clrbits_le32(&publ->dcr, PDQ_MASK << PDQ_SHIFT);

	writel(sdram_params->base.ddrconfig, &chan->msch->ddrconf);
}

static void dram_all_config(const struct dram_info *dram,
			    const struct rk3288_sdram_params *sdram_params)
{
	unsigned int chan;
	u32 sys_reg = 0;

	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (sdram_params->num_channels - 1) << SYS_REG_NUM_CH_SHIFT;
	for (chan = 0; chan < sdram_params->num_channels; chan++) {
		const struct rk3288_sdram_channel *info =
			&sdram_params->ch[chan];

		sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(chan);
		sys_reg |= chan << SYS_REG_CHINFO_SHIFT(chan);
		sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(chan);
		sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(chan);
		sys_reg |= info->bk == 3 ? 1 << SYS_REG_BK_SHIFT(chan) : 0;
		sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(chan);
		sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(chan);
		sys_reg |= info->bw << SYS_REG_BW_SHIFT(chan);
		sys_reg |= info->dbw << SYS_REG_DBW_SHIFT(chan);

		dram_cfg_rbc(&dram->chan[chan], chan, sdram_params);
	}
	writel(sys_reg, &dram->pmu->sys_reg[2]);
	rk_clrsetreg(&dram->sgrf->soc_con2, 0x1f, sdram_params->base.stride);
}

static int sdram_init(struct dram_info *dram,
		      const struct rk3288_sdram_params *sdram_params)
{
	int channel;
	int zqcr;
	int ret;

	debug("%s start\n", __func__);
	if ((sdram_params->base.dramtype == DDR3 &&
	     sdram_params->base.ddr_freq > 800000000) ||
	    (sdram_params->base.dramtype == LPDDR3 &&
	     sdram_params->base.ddr_freq > 533000000)) {
		debug("SDRAM frequency is too high!\n");
		return -E2BIG;
	}

	debug("ddr clk dpll\n");
	ret = clk_set_rate(&dram->ddr_clk, sdram_params->base.ddr_freq);
	debug("ret=%d\n", ret);
	if (ret) {
		debug("Could not set DDR clock\n");
		return ret;
	}

	for (channel = 0; channel < 2; channel++) {
		const struct chan_info *chan = &dram->chan[channel];
		struct rk3288_ddr_pctl *pctl = chan->pctl;
		struct rk3288_ddr_publ *publ = chan->publ;

		phy_pctrl_reset(dram->cru, publ, channel);
		phy_dll_bypass_set(publ, sdram_params->base.ddr_freq);

		if (channel >= sdram_params->num_channels)
			continue;

		dfi_cfg(pctl, sdram_params->base.dramtype);

		pctl_cfg(channel, pctl, sdram_params, dram->grf);

		phy_cfg(chan, channel, sdram_params);

		phy_init(publ);

		writel(POWER_UP_START, &pctl->powctl);
		while (!(readl(&pctl->powstat) & POWER_UP_DONE))
			;

		memory_init(publ, sdram_params->base.dramtype);
		move_to_config_state(publ, pctl);

		if (sdram_params->base.dramtype == LPDDR3) {
			send_command(pctl, 3, DESELECT_CMD, 0);
			udelay(1);
			send_command(pctl, 3, PREA_CMD, 0);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 63, 0xfc);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 1,
					sdram_params->phy_timing.mr[1]);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 2,
					sdram_params->phy_timing.mr[2]);
			udelay(1);
			send_command_op(pctl, 3, MRS_CMD, 3,
					sdram_params->phy_timing.mr[3]);
			udelay(1);
		}

		set_bandwidth_ratio(chan, channel,
				    sdram_params->ch[channel].bw, dram->grf);
		/*
		 * set cs
		 * CS0, n=1
		 * CS1, n=2
		 * CS0 & CS1, n = 3
		 */
		clrsetbits_le32(&publ->pgcr, 0xF << 18,
				(sdram_params->ch[channel].rank | 1) << 18);
		/* DS=40ohm, ODT=155ohm */
		zqcr = 1 << ZDEN_SHIFT | 2 << PU_ONDIE_SHIFT |
		       2 << PD_ONDIE_SHIFT | 0x19 << PU_OUTPUT_SHIFT |
		       0x19 << PD_OUTPUT_SHIFT;
		writel(zqcr, &publ->zq1cr[0]);
		writel(zqcr, &publ->zq0cr[0]);

		if (sdram_params->base.dramtype == LPDDR3) {
			/* LPDDR2/LPDDR3 need to wait for DAI complete, max 10us */
			udelay(10);
			send_command_op(pctl,
					sdram_params->ch[channel].rank | 1,
					MRS_CMD, 11,
					sdram_params->base.odt ? 3 : 0);
			if (channel == 0) {
				writel(0, &pctl->mrrcfg0);
				send_command_op(pctl, 1, MRR_CMD, 8, 0);
				/* S8 */
				if ((readl(&pctl->mrrstat0) & 0x3) != 3) {
					debug("failed!\n");
					return -EREMOTEIO;
				}
			}
		}

		if (data_training(chan, channel, sdram_params) == -1) {
			if (sdram_params->base.dramtype == LPDDR3) {
				ddr_phy_ctl_reset(dram->cru, channel, 1);
				udelay(10);
				ddr_phy_ctl_reset(dram->cru, channel, 0);
				udelay(10);
			}
			debug("failed!\n");
			return -EIO;
		}

		if (sdram_params->base.dramtype == LPDDR3) {
			u32 i;

			writel(0, &pctl->mrrcfg0);
			for (i = 0; i < 17; i++)
				send_command_op(pctl, 1, MRR_CMD, i, 0);
		}
		move_to_access_state(chan);
	}
	dram_all_config(dram, sdram_params);
	debug("%s done\n", __func__);

	return 0;
}
#endif /* CONFIG_SPL_BUILD */

/* Decode the amount of SDRAM present (in MiB) from the geometry saved in PMU SYS_REG2 */
size_t sdram_size_mb(struct rk3288_pmu *pmu)
{
	u32 rank, col, bk, cs0_row, cs1_row, bw, row_3_4;
	size_t chipsize_mb = 0;
	size_t size_mb = 0;
	u32 ch;
	u32 sys_reg = readl(&pmu->sys_reg[2]);
	u32 chans;

	chans = 1 + ((sys_reg >> SYS_REG_NUM_CH_SHIFT) & SYS_REG_NUM_CH_MASK);

	for (ch = 0; ch < chans; ch++) {
		rank = 1 + (sys_reg >> SYS_REG_RANK_SHIFT(ch) &
			    SYS_REG_RANK_MASK);
		col = 9 + (sys_reg >> SYS_REG_COL_SHIFT(ch) & SYS_REG_COL_MASK);
		bk = sys_reg & (1 << SYS_REG_BK_SHIFT(ch)) ? 3 : 0;
		cs0_row = 13 + (sys_reg >> SYS_REG_CS0_ROW_SHIFT(ch) &
				SYS_REG_CS0_ROW_MASK);
		cs1_row = 13 + (sys_reg >> SYS_REG_CS1_ROW_SHIFT(ch) &
				SYS_REG_CS1_ROW_MASK);
		bw = (sys_reg >> SYS_REG_BW_SHIFT(ch)) &
			SYS_REG_BW_MASK;
		row_3_4 = sys_reg >> SYS_REG_ROW_3_4_SHIFT(ch) &
			SYS_REG_ROW_3_4_MASK;

		chipsize_mb = (1 << (cs0_row + col + bk + bw - 20));

		if (rank > 1)
			chipsize_mb += chipsize_mb >>
				(cs0_row - cs1_row);
		if (row_3_4)
			chipsize_mb = chipsize_mb * 3 / 4;
		size_mb += chipsize_mb;
	}

	/*
	 * we use the 0x00000000~0xfeffffff space since 0xff000000~0xffffffff
	 * is SoC register space (i.e. reserved)
	 */
	size_mb = min(size_mb, 0xff000000 >> 20);

	return size_mb;
}

#ifdef CONFIG_SPL_BUILD
# ifdef CONFIG_ROCKCHIP_FAST_SPL
static int veyron_init(struct dram_info *priv)
{
	struct udevice *pmic;
	int ret;

	ret = uclass_first_device_err(UCLASS_PMIC, &pmic);
	if (ret)
		return ret;

	/* Slowly raise to max CPU voltage to prevent overshoot */
	ret = rk808_spl_configure_buck(pmic, 1, 1200000);
	if (ret)
		return ret;
	udelay(175); /* Must wait for voltage to stabilize, 2mV/us */
	ret = rk808_spl_configure_buck(pmic, 1, 1400000);
	if (ret)
		return ret;
	udelay(100); /* Must wait for voltage to stabilize, 2mV/us */

	rkclk_configure_cpu(priv->cru, priv->grf);

	return 0;
}
# endif

static int setup_sdram(struct udevice *dev)
{
	struct dram_info *priv = dev_get_priv(dev);
	struct rk3288_sdram_params *params = dev_get_platdata(dev);

# ifdef CONFIG_ROCKCHIP_FAST_SPL
	if (priv->is_veyron) {
		int ret;

		ret = veyron_init(priv);
		if (ret)
			return ret;
	}
# endif

	return sdram_init(priv, params);
}

static int rk3288_dmc_ofdata_to_platdata(struct udevice *dev)
{
	struct rk3288_sdram_params *params = dev_get_platdata(dev);
	const void *blob = gd->fdt_blob;
	int node = dev->of_offset;
	int i, ret;

	params->num_channels = fdtdec_get_int(blob, node,
					      "rockchip,num-channels", 1);
	for (i = 0; i < params->num_channels; i++) {
		ret = fdtdec_get_byte_array(blob, node,
					    "rockchip,sdram-channel",
					    (u8 *)&params->ch[i],
					    sizeof(params->ch[i]));
		if (ret) {
			debug("%s: Cannot read rockchip,sdram-channel\n",
			      __func__);
			return -EINVAL;
		}
	}
	ret = fdtdec_get_int_array(blob, node, "rockchip,pctl-timing",
				   (u32 *)&params->pctl_timing,
				   sizeof(params->pctl_timing) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,pctl-timing\n", __func__);
		return -EINVAL;
	}
	ret = fdtdec_get_int_array(blob, node, "rockchip,phy-timing",
				   (u32 *)&params->phy_timing,
				   sizeof(params->phy_timing) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,phy-timing\n", __func__);
		return -EINVAL;
	}
	ret = fdtdec_get_int_array(blob, node, "rockchip,sdram-params",
				   (u32 *)&params->base,
				   sizeof(params->base) / sizeof(u32));
	if (ret) {
		debug("%s: Cannot read rockchip,sdram-params\n", __func__);
		return -EINVAL;
	}
#ifdef CONFIG_ROCKCHIP_FAST_SPL
	struct dram_info *priv = dev_get_priv(dev);

	priv->is_veyron = !fdt_node_check_compatible(blob, 0, "google,veyron");
#endif
	ret = regmap_init_mem(dev, &params->map);
	if (ret)
		return ret;

	return 0;
}
#endif /* CONFIG_SPL_BUILD */

static int rk3288_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_SPL_BUILD
	struct rk3288_sdram_params *plat = dev_get_platdata(dev);
#endif
	struct dram_info *priv = dev_get_priv(dev);
	struct regmap *map;
	int ret;
	struct udevice *dev_clk;

	map = syscon_get_regmap_by_driver_data(ROCKCHIP_SYSCON_NOC);
	if (IS_ERR(map))
		return PTR_ERR(map);
	priv->chan[0].msch = regmap_get_range(map, 0);
	priv->chan[1].msch = (struct rk3288_msch *)
			(regmap_get_range(map, 0) + 0x80);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
	priv->sgrf = syscon_get_first_range(ROCKCHIP_SYSCON_SGRF);
	priv->pmu = syscon_get_first_range(ROCKCHIP_SYSCON_PMU);

#ifdef CONFIG_SPL_BUILD
	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
	priv->chan[0].publ = regmap_get_range(plat->map, 1);
	priv->chan[1].pctl = regmap_get_range(plat->map, 2);
	priv->chan[1].publ = regmap_get_range(plat->map, 3);
#endif
	ret = uclass_get_device(UCLASS_CLK, 0, &dev_clk);
	if (ret)
		return ret;
	priv->ddr_clk.id = CLK_DDR;
	ret = clk_request(dev_clk, &priv->ddr_clk);
	if (ret)
		return ret;

	priv->cru = rockchip_get_cru();
	if (IS_ERR(priv->cru))
		return PTR_ERR(priv->cru);
#ifdef CONFIG_SPL_BUILD
	ret = setup_sdram(dev);
	if (ret)
		return ret;
#endif
	priv->info.base = 0;
	priv->info.size = sdram_size_mb(priv->pmu) << 20;

	return 0;
}

static int rk3288_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk3288_dmc_ops = {
	.get_info = rk3288_dmc_get_info,
};

static const struct udevice_id rk3288_dmc_ids[] = {
	{ .compatible = "rockchip,rk3288-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk3288) = {
	.name = "rk3288_dmc",
	.id = UCLASS_RAM,
	.of_match = rk3288_dmc_ids,
	.ops = &rk3288_dmc_ops,
#ifdef CONFIG_SPL_BUILD
	.ofdata_to_platdata = rk3288_dmc_ofdata_to_platdata,
#endif
	.probe = rk3288_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_SPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rk3288_sdram_params),
#endif
};