blob: 608482ab740ae5b79f28f0c0b8f602ad24229fec [file] [log] [blame]
Caesar Wangf33eb2c2016-10-27 01:13:16 +08001/*
2 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
3 *
dp-armfa3cf0b2017-05-03 09:38:09 +01004 * SPDX-License-Identifier: BSD-3-Clause
Caesar Wangf33eb2c2016-10-27 01:13:16 +08005 */
6#include <debug.h>
7#include <arch_helpers.h>
8#include <platform_def.h>
9#include <plat_private.h>
10#include <dram.h>
11#include <pmu_regs.h>
12#include <rk3399_def.h>
Xing Zheng22a98712017-02-24 14:56:41 +080013#include <secure.h>
Caesar Wangf33eb2c2016-10-27 01:13:16 +080014#include <soc.h>
15#include <suspend.h>
16
/* PMUGRF OS scratch registers (offsets within PMUGRF) */
#define PMUGRF_OS_REG0		0x300
#define PMUGRF_OS_REG1		0x304
#define PMUGRF_OS_REG2		0x308
#define PMUGRF_OS_REG3		0x30c

/*
 * CRU soft-reset encodings for the DDR controller/PHY of channel @ch.
 * Following the Rockchip write-mask convention, the bit shifted by +16
 * is the write-enable for the value bit (n) in the low half-word.
 */
#define CRU_SFTRST_DDR_CTRL(ch, n)	((0x1 << (8 + 16 + (ch) * 4)) | \
					 ((n) << (8 + (ch) * 4)))
#define CRU_SFTRST_DDR_PHY(ch, n)	((0x1 << (9 + 16 + (ch) * 4)) | \
					 ((n) << (9 + (ch) * 4)))

/* DPLL CON register field encode/decode helpers */
#define FBDIV_ENC(n)			((n) << 16)
#define FBDIV_DEC(n)			(((n) >> 16) & 0xfff)
#define POSTDIV2_ENC(n)			((n) << 12)
#define POSTDIV2_DEC(n)			(((n) >> 12) & 0x7)
#define POSTDIV1_ENC(n)			((n) << 8)
#define POSTDIV1_DEC(n)			(((n) >> 8) & 0x7)
#define REFDIV_ENC(n)			(n)
#define REFDIV_DEC(n)			((n) & 0x3f)

/* PMU CRU */
#define PMUCRU_RSTNHOLD_CON0		0x120
#define PMUCRU_RSTNHOLD_CON1		0x124

/* Write-masked GPIO0/GPIO1 preset-hold bits for CRU_PMU_RSTHOLD_CON(1) */
#define PRESET_GPIO0_HOLD(n)		(((n) << 7) | WMSK_BIT(7))
#define PRESET_GPIO1_HOLD(n)		(((n) << 8) | WMSK_BIT(8))

#define SYS_COUNTER_FREQ_IN_MHZ		(SYS_COUNTER_FREQ_IN_TICKS / 1000000)
44
45/*
46 * Copy @num registers from @src to @dst
47 */
48__sramfunc void sram_regcpy(uintptr_t dst, uintptr_t src, uint32_t num)
49{
50 while (num--) {
51 mmio_write_32(dst, mmio_read_32(src));
52 dst += sizeof(uint32_t);
53 src += sizeof(uint32_t);
54 }
55}
56
/*
 * Return the low 32 bits of the generic counter, inverted, so callers
 * can treat the result as a down-counter.
 */
static __sramfunc uint32_t sram_get_timer_value(void)
{
	/*
	 * Generic delay timer implementation expects the timer to be a down
	 * counter. We apply bitwise NOT operator to the tick values returned
	 * by read_cntpct_el0() to simulate the down counter.
	 */
	return (uint32_t)(~read_cntpct_el0());
}
66
67static __sramfunc void sram_udelay(uint32_t usec)
68{
69 uint32_t start, cnt, delta, delta_us;
70
71 /* counter is decreasing */
72 start = sram_get_timer_value();
73 do {
74 cnt = sram_get_timer_value();
75 if (cnt > start) {
76 delta = UINT32_MAX - cnt;
77 delta += start;
78 } else
79 delta = start - cnt;
80 delta_us = (delta * SYS_COUNTER_FREQ_IN_MHZ);
81 } while (delta_us < usec);
82}
83
/*
 * Re-apply the secure GRF DDR-region configuration that is lost when
 * PD_CENTER powers down during suspend.
 */
static __sramfunc void configure_sgrf(void)
{
	/*
	 * SGRF_DDR_RGN_DPLL_CLK and SGRF_DDR_RGN_RTC_CLK:
	 * IC ECO bug, need to set this register.
	 *
	 * SGRF_DDR_RGN_BYPS:
	 * After the PD_CENTER suspend/resume, the DDR region
	 * related registers in the SGRF will be reset, we
	 * need to re-initialize them.
	 */
	mmio_write_32(SGRF_BASE + SGRF_DDRRGN_CON0_16(16),
		      SGRF_DDR_RGN_DPLL_CLK |
		      SGRF_DDR_RGN_RTC_CLK |
		      SGRF_DDR_RGN_BYPS);
}
100
101static __sramfunc void rkclk_ddr_reset(uint32_t channel, uint32_t ctl,
102 uint32_t phy)
103{
104 channel &= 0x1;
105 ctl &= 0x1;
106 phy &= 0x1;
107 mmio_write_32(CRU_BASE + CRU_SOFTRST_CON(4),
108 CRU_SFTRST_DDR_CTRL(channel, ctl) |
109 CRU_SFTRST_DDR_PHY(channel, phy));
110}
111
/*
 * Pulse the DDR controller/PHY soft resets for channel @ch with 10us of
 * settle time after each step: assert both, release the PHY, then
 * release the controller.
 */
static __sramfunc void phy_pctrl_reset(uint32_t ch)
{
	/* Assert both controller and PHY resets. */
	rkclk_ddr_reset(ch, 1, 1);
	sram_udelay(10);
	/* Release the PHY reset while the controller stays in reset. */
	rkclk_ddr_reset(ch, 1, 0);
	sram_udelay(10);
	/* Finally release the controller reset. */
	rkclk_ddr_reset(ch, 0, 0);
	sram_udelay(10);
}
121
Caesar Wangf33eb2c2016-10-27 01:13:16 +0800122static __sramfunc void set_cs_training_index(uint32_t ch, uint32_t rank)
123{
Derek Basehore7b4d8982017-05-12 21:29:13 -0700124 uint32_t byte;
125
Caesar Wangf33eb2c2016-10-27 01:13:16 +0800126 /* PHY_8/136/264/392 phy_per_cs_training_index_X 1bit offset_24 */
Derek Basehore7b4d8982017-05-12 21:29:13 -0700127 for (byte = 0; byte < 4; byte++)
128 mmio_clrsetbits_32(PHY_REG(ch, 8 + (128 * byte)), 0x1 << 24,
129 rank << 24);
Caesar Wangf33eb2c2016-10-27 01:13:16 +0800130}
131
132static __sramfunc void select_per_cs_training_index(uint32_t ch, uint32_t rank)
133{
134 /* PHY_84 PHY_PER_CS_TRAINING_EN_0 1bit offset_16 */
135 if ((mmio_read_32(PHY_REG(ch, 84)) >> 16) & 1)
136 set_cs_training_index(ch, rank);
137}
138
/*
 * After write leveling completes, enable multicast for the per-CS
 * training registers, override every byte's write-leveling delay with
 * a fixed 0x200, and request a controller update so the new values
 * take effect.
 *
 * NOTE(review): this function is plain `static` (DRAM-resident) but is
 * called from __sramfunc data_training() during resume — confirm DRAM
 * is accessible at that point, or consider marking it __sramfunc too.
 */
static void override_write_leveling_value(uint32_t ch)
{
	uint32_t byte;

	for (byte = 0; byte < 4; byte++) {
		/*
		 * PHY_8/136/264/392
		 * phy_per_cs_training_multicast_en_X 1bit offset_16
		 */
		mmio_clrsetbits_32(PHY_REG(ch, 8 + (128 * byte)), 0x1 << 16,
				   1 << 16);
		/* Force the write-leveling delay field (bits 31:16) to 0x200 */
		mmio_clrsetbits_32(PHY_REG(ch, 63 + (128 * byte)),
				   0xffff << 16,
				   0x200 << 16);
	}

	/* CTL_200 ctrlupd_req 1bit offset_8 */
	mmio_clrsetbits_32(CTL_REG(ch, 200), 0x1 << 8, 0x1 << 8);
}
158
/*
 * Run the PI-driven DRAM training sequence(s) selected by
 * @training_flag on channel @ch. PI_FULL_TRAINING is expanded to the
 * set of trainings appropriate for the DRAM type. Each training stage
 * follows the same pattern: select the rank, arm the training enable
 * bit, issue the request, then poll PI_174 for done/error while also
 * watching the PHY observation registers for errors.
 *
 * Returns 0 on success, -1 as soon as any stage reports an error.
 */
static __sramfunc int data_training(uint32_t ch,
		struct rk3399_sdram_params *sdram_params,
		uint32_t training_flag)
{
	uint32_t obs_0, obs_1, obs_2, obs_3, obs_err = 0;
	uint32_t rank = sdram_params->ch[ch].rank;
	uint32_t rank_mask;
	uint32_t i, tmp;

	/* LPDDR4 uses a sparse rank mask (ranks 0/2), others are dense. */
	if (sdram_params->dramtype == LPDDR4)
		rank_mask = (rank == 1) ? 0x5 : 0xf;
	else
		rank_mask = (rank == 1) ? 0x1 : 0x3;

	/* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
	mmio_setbits_32(PHY_REG(ch, 927), (1 << 22));

	/* Expand PI_FULL_TRAINING into the per-DRAM-type training set. */
	if (training_flag == PI_FULL_TRAINING) {
		if (sdram_params->dramtype == LPDDR4) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING |
					PI_WDQ_LEVELING;
		} else if (sdram_params->dramtype == LPDDR3) {
			training_flag = PI_CA_TRAINING | PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING;
		} else if (sdram_params->dramtype == DDR3) {
			training_flag = PI_WRITE_LEVELING |
					PI_READ_GATE_TRAINING |
					PI_READ_LEVELING;
		}
	}

	/* ca training(LPDDR4,LPDDR3 support) */
	if ((training_flag & PI_CA_TRAINING) == PI_CA_TRAINING) {
		for (i = 0; i < 4; i++) {
			if (!(rank_mask & (1 << i)))
				continue;

			select_per_cs_training_index(ch, i);
			/* PI_100 PI_CALVL_EN:RW:8:2 */
			mmio_clrsetbits_32(PI_REG(ch, 100), 0x3 << 8, 0x2 << 8);

			/* PI_92 PI_CALVL_REQ:WR:16:1,PI_CALVL_CS:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 92),
					   (0x1 << 16) | (0x3 << 24),
					   (0x1 << 16) | (i << 24));
			/* Poll until CA-leveling done (bit 11) or error. */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs
				 * PHY_532/660/788 phy_adr_calvl_obs1_:0:32
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 532));
				obs_1 = mmio_read_32(PHY_REG(ch, 660));
				obs_2 = mmio_read_32(PHY_REG(ch, 788));
				if (((obs_0 >> 30) & 0x3) ||
				    ((obs_1 >> 30) & 0x3) ||
				    ((obs_2 >> 30) & 0x3))
					obs_err = 1;
				if ((((tmp >> 11) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 5) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 5) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 100), 0x3 << 8);
	}

	/* write leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_WRITE_LEVELING) == PI_WRITE_LEVELING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_60 PI_WRLVL_EN:RW:8:2 */
			mmio_clrsetbits_32(PI_REG(ch, 60), 0x3 << 8, 0x2 << 8);
			/* PI_59 PI_WRLVL_REQ:WR:8:1,PI_WRLVL_CS:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 59),
					   (0x1 << 8) | (0x3 << 16),
					   (0x1 << 8) | (i << 16));

			/* Poll until write-leveling done (bit 10) or error. */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs, if error maybe can not
				 * get leveling done PHY_40/168/296/424
				 * phy_wrlvl_status_obs_X:0:13
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 40));
				obs_1 = mmio_read_32(PHY_REG(ch, 168));
				obs_2 = mmio_read_32(PHY_REG(ch, 296));
				obs_3 = mmio_read_32(PHY_REG(ch, 424));
				if (((obs_0 >> 12) & 0x1) ||
				    ((obs_1 >> 12) & 0x1) ||
				    ((obs_2 >> 12) & 0x1) ||
				    ((obs_3 >> 12) & 0x1))
					obs_err = 1;
				if ((((tmp >> 10) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 4) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 4) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}

			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		override_write_leveling_value(ch);
		mmio_clrbits_32(PI_REG(ch, 60), 0x3 << 8);
	}

	/* read gate training(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_GATE_TRAINING) == PI_READ_GATE_TRAINING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_80 PI_RDLVL_GATE_EN:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 24,
					   0x2 << 24);
			/*
			 * PI_74 PI_RDLVL_GATE_REQ:WR:16:1
			 * PI_RDLVL_CS:RW:24:2
			 */
			mmio_clrsetbits_32(PI_REG(ch, 74),
					   (0x1 << 16) | (0x3 << 24),
					   (0x1 << 16) | (i << 24));

			/* Poll until gate training done (bit 9) or error. */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * check status obs
				 * PHY_43/171/299/427
				 * PHY_GTLVL_STATUS_OBS_x:16:8
				 */
				obs_0 = mmio_read_32(PHY_REG(ch, 43));
				obs_1 = mmio_read_32(PHY_REG(ch, 171));
				obs_2 = mmio_read_32(PHY_REG(ch, 299));
				obs_3 = mmio_read_32(PHY_REG(ch, 427));
				if (((obs_0 >> (16 + 6)) & 0x3) ||
				    ((obs_1 >> (16 + 6)) & 0x3) ||
				    ((obs_2 >> (16 + 6)) & 0x3) ||
				    ((obs_3 >> (16 + 6)) & 0x3))
					obs_err = 1;
				if ((((tmp >> 9) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 3) & 0x1) == 0x0) &&
				    (obs_err == 0))
					break;
				else if ((((tmp >> 3) & 0x1) == 0x1) ||
					 (obs_err == 1))
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 24);
	}

	/* read leveling(LPDDR4,LPDDR3,DDR3 support) */
	if ((training_flag & PI_READ_LEVELING) == PI_READ_LEVELING) {
		for (i = 0; i < rank; i++) {
			select_per_cs_training_index(ch, i);
			/* PI_80 PI_RDLVL_EN:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 80), 0x3 << 16,
					   0x2 << 16);
			/* PI_74 PI_RDLVL_REQ:WR:8:1,PI_RDLVL_CS:RW:24:2 */
			mmio_clrsetbits_32(PI_REG(ch, 74),
					   (0x1 << 8) | (0x3 << 24),
					   (0x1 << 8) | (i << 24));
			/* Poll until read leveling done (bit 8) or error. */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;

				/*
				 * make sure status obs not report error bit
				 * PHY_46/174/302/430
				 * phy_rdlvl_status_obs_X:16:8
				 */
				if ((((tmp >> 8) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 2) & 0x1) == 0x0))
					break;
				else if (((tmp >> 2) & 0x1) == 0x1)
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 80), 0x3 << 16);
	}

	/* wdq leveling(LPDDR4 support) */
	if ((training_flag & PI_WDQ_LEVELING) == PI_WDQ_LEVELING) {
		for (i = 0; i < 4; i++) {
			if (!(rank_mask & (1 << i)))
				continue;

			select_per_cs_training_index(ch, i);
			/*
			 * disable PI_WDQLVL_VREF_EN before wdq leveling?
			 * PI_181 PI_WDQLVL_VREF_EN:RW:8:1
			 */
			mmio_clrbits_32(PI_REG(ch, 181), 0x1 << 8);
			/* PI_124 PI_WDQLVL_EN:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 124), 0x3 << 16,
					   0x2 << 16);
			/* PI_121 PI_WDQLVL_REQ:WR:8:1,PI_WDQLVL_CS:RW:16:2 */
			mmio_clrsetbits_32(PI_REG(ch, 121),
					   (0x1 << 8) | (0x3 << 16),
					   (0x1 << 8) | (i << 16));
			/* Poll until WDQ leveling done (bit 12) or error. */
			while (1) {
				/* PI_174 PI_INT_STATUS:RD:8:18 */
				tmp = mmio_read_32(PI_REG(ch, 174)) >> 8;
				if ((((tmp >> 12) & 0x1) == 0x1) &&
				    (((tmp >> 13) & 0x1) == 0x1) &&
				    (((tmp >> 6) & 0x1) == 0x0))
					break;
				else if (((tmp >> 6) & 0x1) == 0x1)
					return -1;
			}
			/* clear interrupt,PI_175 PI_INT_ACK:WR:0:17 */
			mmio_write_32(PI_REG(ch, 175), 0x00003f7c);
		}
		mmio_clrbits_32(PI_REG(ch, 124), 0x3 << 16);
	}

	/* PHY_927 PHY_PAD_DQS_DRIVE RPULL offset_22 */
	mmio_clrbits_32(PHY_REG(ch, 927), (1 << 22));

	return 0;
}
403
/*
 * Program the memory scheduler's device configuration and size for one
 * channel. Capacity is derived from the channel geometry; cs0_row/col/
 * bk/bw sum minus 20 yields log2 of the CS0 capacity in MB.
 */
static __sramfunc void set_ddrconfig(struct rk3399_sdram_params *sdram_params,
		unsigned char channel, uint32_t ddrconfig)
{
	/* only need to set ddrconfig */
	struct rk3399_sdram_channel *ch = &sdram_params->ch[channel];
	unsigned int cs0_cap = 0;
	unsigned int cs1_cap = 0;

	cs0_cap = (1 << (ch->cs0_row + ch->col + ch->bk + ch->bw - 20));
	/* CS1 may have fewer row bits than CS0; scale its capacity down. */
	if (ch->rank > 1)
		cs1_cap = cs0_cap >> (ch->cs0_row - ch->cs1_row);
	/* 3/4-row parts only populate three quarters of the address space. */
	if (ch->row_3_4) {
		cs0_cap = cs0_cap * 3 / 4;
		cs1_cap = cs1_cap * 3 / 4;
	}

	mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICECONF,
		      ddrconfig | (ddrconfig << 6));
	/* DEVICESIZE holds each CS capacity in units of 32MB. */
	mmio_write_32(MSCH_BASE(channel) + MSCH_DEVICESIZE,
		      ((cs0_cap / 32) & 0xff) | (((cs1_cap / 32) & 0xff) << 8));
}
425
/*
 * Restore the memory-scheduler (MSCH) NoC timing registers for every
 * populated channel, then re-apply the global stride and the
 * reboot-hold configuration.
 */
static __sramfunc void dram_all_config(struct rk3399_sdram_params *sdram_params)
{
	unsigned int i;

	for (i = 0; i < 2; i++) {
		struct rk3399_sdram_channel *info = &sdram_params->ch[i];
		struct rk3399_msch_timings *noc = &info->noc_timings;

		/* A channel with col == 0 is unpopulated — skip it. */
		if (sdram_params->ch[i].col == 0)
			continue;

		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGA0,
			      noc->ddrtiminga0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGB0,
			      noc->ddrtimingb0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRTIMINGC0,
			      noc->ddrtimingc0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DEVTODEV0,
			      noc->devtodev0.d32);
		mmio_write_32(MSCH_BASE(i) + MSCH_DDRMODE, noc->ddrmode.d32);

		/* rank 1 memory clock disable (dfi_dram_clk_disable = 1) */
		if (sdram_params->ch[i].rank == 1)
			mmio_setbits_32(CTL_REG(i, 276), 1 << 17);
	}

	DDR_STRIDE(sdram_params->stride);

	/* reboot hold register set */
	mmio_write_32(PMUCRU_BASE + CRU_PMU_RSTHOLD_CON(1),
		      CRU_PMU_SGRF_RST_RLS |
		      PRESET_GPIO0_HOLD(1) |
		      PRESET_GPIO1_HOLD(1));
	mmio_clrsetbits_32(CRU_BASE + CRU_GLB_RST_CON, 0x3, 0x3);
}
461
/*
 * Reprogram one channel's DDR controller (CTL), PI and PHY register
 * files from the copies captured by dmc_save(), start the controller
 * and PI, and wait for the PHY PLL/DLL lock indications.
 */
static __sramfunc void pctl_cfg(uint32_t ch,
		struct rk3399_sdram_params *sdram_params)
{
	const uint32_t *params_ctl = sdram_params->pctl_regs.denali_ctl;
	const uint32_t *params_pi = sdram_params->pi_regs.denali_pi;
	const struct rk3399_ddr_publ_regs *phy_regs = &sdram_params->phy_regs;
	uint32_t tmp, tmp1, tmp2, i;

	/*
	 * Workaround controller bug:
	 * Do not program DRAM_CLASS until NO_PHY_IND_TRAIN_INT is programmed
	 */
	sram_regcpy(CTL_REG(ch, 1), (uintptr_t)&params_ctl[1],
		    CTL_REG_NUM - 1);
	mmio_write_32(CTL_REG(ch, 0), params_ctl[0]);
	sram_regcpy(PI_REG(ch, 0), (uintptr_t)&params_pi[0],
		    PI_REG_NUM);

	/* Restore PHY_910..912 ahead of the full PHY restore below. */
	sram_regcpy(PHY_REG(ch, 910), (uintptr_t)&phy_regs->phy896[910 - 896],
		    3);

	mmio_clrsetbits_32(CTL_REG(ch, 68), PWRUP_SREFRESH_EXIT,
			   PWRUP_SREFRESH_EXIT);

	/* PHY_DLL_RST_EN */
	mmio_clrsetbits_32(PHY_REG(ch, 957), 0x3 << 24, 1 << 24);
	/* Ensure all prior register writes land before starting. */
	dmbst();

	mmio_setbits_32(PI_REG(ch, 0), START);
	mmio_setbits_32(CTL_REG(ch, 0), START);

	/* wait lock */
	while (1) {
		tmp = mmio_read_32(PHY_REG(ch, 920));
		tmp1 = mmio_read_32(PHY_REG(ch, 921));
		tmp2 = mmio_read_32(PHY_REG(ch, 922));
		if ((((tmp >> 16) & 0x1) == 0x1) &&
		    (((tmp1 >> 16) & 0x1) == 0x1) &&
		    (((tmp1 >> 0) & 0x1) == 0x1) &&
		    (((tmp2 >> 0) & 0x1) == 0x1))
			break;
		/* if PLL bypass,don't need wait lock */
		if (mmio_read_32(PHY_REG(ch, 911)) & 0x1)
			break;
	}

	/* Restore the remaining saved PHY register banks. */
	sram_regcpy(PHY_REG(ch, 896), (uintptr_t)&phy_regs->phy896[0], 63);

	for (i = 0; i < 4; i++)
		sram_regcpy(PHY_REG(ch, 128 * i),
			    (uintptr_t)&phy_regs->phy0[i][0], 91);

	for (i = 0; i < 3; i++)
		sram_regcpy(PHY_REG(ch, 512 + 128 * i),
			    (uintptr_t)&phy_regs->phy512[i][0], 38);
}
518
/*
 * Switch the DRAM controllers to the other frequency index through the
 * CIC handshake, then re-run full PI training on every channel at the
 * new index.
 *
 * Returns 0 on success, -1 if training fails on any channel.
 */
static __sramfunc int dram_switch_to_next_index(
		struct rk3399_sdram_params *sdram_params)
{
	uint32_t ch, ch_count;
	/* Next index = (current CTL_111[16] + 1) modulo 2. */
	uint32_t fn = ((mmio_read_32(CTL_REG(0, 111)) >> 16) + 1) & 0x1;

	/* Request the index switch via the CIC and wait for the ack bit. */
	mmio_write_32(CIC_BASE + CIC_CTRL0,
		      (((0x3 << 4) | (1 << 2) | 1) << 16) |
		      (fn << 4) | (1 << 2) | 1);
	while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 2)))
		;

	/* Complete the handshake and wait for the done bit. */
	mmio_write_32(CIC_BASE + CIC_CTRL0, 0x20002);
	while (!(mmio_read_32(CIC_BASE + CIC_STATUS0) & (1 << 0)))
		;

	ch_count = sdram_params->num_channels;

	/* LPDDR4 f2 cann't do training, all training will fail */
	for (ch = 0; ch < ch_count; ch++) {
		mmio_clrsetbits_32(PHY_REG(ch, 896), (0x3 << 8) | 1,
				   fn << 8);

		/* data_training failed */
		if (data_training(ch, sdram_params, PI_FULL_TRAINING))
			return -1;
	}

	return 0;
}
549
550/*
551 * Needs to be done for both channels at once in case of a shared reset signal
552 * between channels.
553 */
554static __sramfunc int pctl_start(uint32_t channel_mask,
555 struct rk3399_sdram_params *sdram_params)
556{
557 uint32_t count;
Derek Basehore04c74b92017-01-31 00:20:19 -0800558 uint32_t byte;
Caesar Wangf33eb2c2016-10-27 01:13:16 +0800559
560 mmio_setbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
561 mmio_setbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);
562
563 /* need de-access IO retention before controller START */
564 if (channel_mask & (1 << 0))
565 mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 19));
566 if (channel_mask & (1 << 1))
567 mmio_setbits_32(PMU_BASE + PMU_PWRMODE_CON, (1 << 23));
568
569 /* PHY_DLL_RST_EN */
570 if (channel_mask & (1 << 0))
571 mmio_clrsetbits_32(PHY_REG(0, 957), 0x3 << 24,
572 0x2 << 24);
573 if (channel_mask & (1 << 1))
574 mmio_clrsetbits_32(PHY_REG(1, 957), 0x3 << 24,
575 0x2 << 24);
576
577 /* check ERROR bit */
578 if (channel_mask & (1 << 0)) {
579 count = 0;
580 while (!(mmio_read_32(CTL_REG(0, 203)) & (1 << 3))) {
581 /* CKE is low, loop 10ms */
582 if (count > 100)
583 return -1;
584
585 sram_udelay(100);
586 count++;
587 }
588
589 mmio_clrbits_32(CTL_REG(0, 68), PWRUP_SREFRESH_EXIT);
Derek Basehore04c74b92017-01-31 00:20:19 -0800590
591 /* Restore the PHY_RX_CAL_DQS value */
592 for (byte = 0; byte < 4; byte++)
593 mmio_clrsetbits_32(PHY_REG(0, 57 + 128 * byte),
594 0xfff << 16,
595 sdram_params->rx_cal_dqs[0][byte]);
Caesar Wangf33eb2c2016-10-27 01:13:16 +0800596 }
597 if (channel_mask & (1 << 1)) {
598 count = 0;
599 while (!(mmio_read_32(CTL_REG(1, 203)) & (1 << 3))) {
600 /* CKE is low, loop 10ms */
601 if (count > 100)
602 return -1;
603
604 sram_udelay(100);
605 count++;
606 }
607
608 mmio_clrbits_32(CTL_REG(1, 68), PWRUP_SREFRESH_EXIT);
Derek Basehore04c74b92017-01-31 00:20:19 -0800609
610 /* Restore the PHY_RX_CAL_DQS value */
611 for (byte = 0; byte < 4; byte++)
612 mmio_clrsetbits_32(PHY_REG(1, 57 + 128 * byte),
613 0xfff << 16,
614 sdram_params->rx_cal_dqs[1][byte]);
Caesar Wangf33eb2c2016-10-27 01:13:16 +0800615 }
616
617 return 0;
618}
619
/*
 * Capture everything needed to bring DRAM back after suspend: the DDR
 * frequency (reconstructed from the DPLL dividers), the ODT setting,
 * the CTL/PI/PHY register files of channel 0, and the per-channel RX
 * DQS calibration values. Runs before suspend, while DRAM is still up.
 */
void dmc_save(void)
{
	struct rk3399_sdram_params *sdram_params = &sdram_config;
	struct rk3399_ddr_publ_regs *phy_regs;
	uint32_t *params_ctl;
	uint32_t *params_pi;
	uint32_t refdiv, postdiv2, postdiv1, fbdiv;
	uint32_t tmp, ch, byte, i;

	phy_regs = &sdram_params->phy_regs;
	params_ctl = sdram_params->pctl_regs.denali_ctl;
	params_pi = sdram_params->pi_regs.denali_pi;

	/* Reconstruct the DDR frequency from the DPLL divider settings. */
	fbdiv = mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, 0)) & 0xfff;
	tmp = mmio_read_32(CRU_BASE + CRU_PLL_CON(DPLL_ID, 1));
	postdiv2 = POSTDIV2_DEC(tmp);
	postdiv1 = POSTDIV1_DEC(tmp);
	refdiv = REFDIV_DEC(tmp);

	sdram_params->ddr_freq = ((fbdiv * 24) /
				  (refdiv * postdiv1 * postdiv2)) * MHz;

	INFO("sdram_params->ddr_freq = %d\n", sdram_params->ddr_freq);
	/* ODT counts as enabled when PHY_5 bits [18:16] are non-zero. */
	sdram_params->odt = (((mmio_read_32(PHY_REG(0, 5)) >> 16) &
			      0x7) != 0) ? 1 : 0;

	/* copy the registers CTL PI and PHY */
	sram_regcpy((uintptr_t)&params_ctl[0], CTL_REG(0, 0), CTL_REG_NUM);

	/* mask DENALI_CTL_00_DATA.START, only copy here, will trigger later */
	params_ctl[0] &= ~(0x1 << 0);

	sram_regcpy((uintptr_t)&params_pi[0], PI_REG(0, 0),
		    PI_REG_NUM);

	/* mask DENALI_PI_00_DATA.START, only copy here, will trigger later*/
	params_pi[0] &= ~(0x1 << 0);

	for (i = 0; i < 4; i++)
		sram_regcpy((uintptr_t)&phy_regs->phy0[i][0],
			    PHY_REG(0, 128 * i), 91);

	for (i = 0; i < 3; i++)
		sram_regcpy((uintptr_t)&phy_regs->phy512[i][0],
			    PHY_REG(0, 512 + 128 * i), 38);

	sram_regcpy((uintptr_t)&phy_regs->phy896[0], PHY_REG(0, 896), 63);

	/* Save per-channel, per-byte RX DQS calibration (PHY_57[27:16]). */
	for (ch = 0; ch < sdram_params->num_channels; ch++) {
		for (byte = 0; byte < 4; byte++)
			sdram_params->rx_cal_dqs[ch][byte] = (0xfff << 16) &
				mmio_read_32(PHY_REG(ch, 57 + byte * 128));
	}

	/* set DENALI_PHY_957_DATA.PHY_DLL_RST_EN = 0x1 */
	phy_regs->phy896[957 - 896] &= ~(0x3 << 24);
	phy_regs->phy896[957 - 896] |= 1 << 24;
	/* Pre-arm PHY_896: set bit 0, clear the index field (bits 9:8). */
	phy_regs->phy896[0] |= 1;
	phy_regs->phy896[0] &= ~(0x3 << 8);
}
680
/*
 * Bring DRAM back after resume from the state saved by dmc_save():
 * reset and reconfigure each channel's controller/PHY, restart the
 * controllers, retrain, and reprogram the memory scheduler. The whole
 * sequence restarts from 'retry' if controller start or training
 * fails. Runs entirely from SRAM while DRAM is unavailable.
 */
__sramfunc void dmc_restore(void)
{
	struct rk3399_sdram_params *sdram_params = &sdram_config;
	uint32_t channel_mask = 0;
	uint32_t channel;

	configure_sgrf();

retry:
	for (channel = 0; channel < sdram_params->num_channels; channel++) {
		phy_pctrl_reset(channel);
		pctl_cfg(channel, sdram_params);
	}

	/* A channel with col != 0 is populated. */
	for (channel = 0; channel < 2; channel++) {
		if (sdram_params->ch[channel].col)
			channel_mask |= 1 << channel;
	}

	if (pctl_start(channel_mask, sdram_params) < 0)
		goto retry;

	for (channel = 0; channel < sdram_params->num_channels; channel++) {
		/* LPDDR2/LPDDR3 need to wait DAI complete, max 10us */
		if (sdram_params->dramtype == LPDDR3)
			sram_udelay(10);

		/* If traning fail, retry to do it again. */
		if (data_training(channel, sdram_params, PI_FULL_TRAINING))
			goto retry;

		set_ddrconfig(sdram_params, channel,
			      sdram_params->ch[channel].ddrconfig);
	}

	dram_all_config(sdram_params);

	/* Switch to index 1 and prepare for DDR frequency switch. */
	dram_switch_to_next_index(sdram_params);
}