/*
 * Copyright (C) 2021-2024, STMicroelectronics - All Rights Reserved
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <errno.h>

#include <common/debug.h>

#include <ddrphy_phyinit.h>

#include <drivers/delay_timer.h>
#include <drivers/st/stm32mp2_ddr_helpers.h>
#include <drivers/st/stm32mp2_ddr_regs.h>
#include <drivers/st/stm32mp_ddr.h>

#include <lib/mmio.h>

#include <platform_def.h>

#define DDRDBG_FRAC_PLL_LOCK	U(0x10)

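/*
 * Register descriptor helper: the 'qd' flag marks registers treated as
 * quasi-dynamic, i.e. registers that must be written under the quasi-dynamic
 * update conditions when the controller is running.
 */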
#define DDRCTL_REG(x, y, z) \
	{ \
		.offset = offsetof(struct stm32mp_ddrctl, x), \
		.par_offset = offsetof(struct y, x), \
		.qd = z \
	}

/*
 * PARAMETERS: values retrieved from the device tree.
 *             Size and order must stay aligned with the binding;
 *             modification is NOT allowed.
 */
#define DDRCTL_REG_REG_SIZE	48	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	20	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	12	/* st,ctl-map */
#if STM32MP_DDR_DUAL_AXI_PORT
#define DDRCTL_REG_PERF_SIZE	21	/* st,ctl-perf */
#else /* !STM32MP_DDR_DUAL_AXI_PORT */
#define DDRCTL_REG_PERF_SIZE	14	/* st,ctl-perf */
#endif /* STM32MP_DDR_DUAL_AXI_PORT */

#define DDRPHY_REG_REG_SIZE	0	/* st,phy-reg */
#define DDRPHY_REG_TIMING_SIZE	0	/* st,phy-timing */

#define DDRCTL_REG_REG(x, z)	DDRCTL_REG(x, stm32mp2_ddrctrl_reg, z)
static const struct stm32mp_ddr_reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr, true),
	DDRCTL_REG_REG(mrctrl0, false),
	DDRCTL_REG_REG(mrctrl1, false),
	DDRCTL_REG_REG(mrctrl2, false),
	DDRCTL_REG_REG(derateen, true),
	DDRCTL_REG_REG(derateint, false),
	DDRCTL_REG_REG(deratectl, false),
	DDRCTL_REG_REG(pwrctl, false),
	DDRCTL_REG_REG(pwrtmg, true),
	DDRCTL_REG_REG(hwlpctl, true),
	DDRCTL_REG_REG(rfshctl0, false),
	DDRCTL_REG_REG(rfshctl1, false),
	DDRCTL_REG_REG(rfshctl3, true),
	DDRCTL_REG_REG(crcparctl0, false),
	DDRCTL_REG_REG(crcparctl1, false),
	DDRCTL_REG_REG(init0, true),
	DDRCTL_REG_REG(init1, false),
	DDRCTL_REG_REG(init2, false),
	DDRCTL_REG_REG(init3, true),
	DDRCTL_REG_REG(init4, true),
	DDRCTL_REG_REG(init5, false),
	DDRCTL_REG_REG(init6, true),
	DDRCTL_REG_REG(init7, true),
	DDRCTL_REG_REG(dimmctl, false),
	DDRCTL_REG_REG(rankctl, true),
	DDRCTL_REG_REG(rankctl1, true),
	DDRCTL_REG_REG(zqctl0, true),
	DDRCTL_REG_REG(zqctl1, false),
	DDRCTL_REG_REG(zqctl2, false),
	DDRCTL_REG_REG(dfitmg0, true),
	DDRCTL_REG_REG(dfitmg1, true),
	DDRCTL_REG_REG(dfilpcfg0, false),
	DDRCTL_REG_REG(dfilpcfg1, false),
	DDRCTL_REG_REG(dfiupd0, true),
	DDRCTL_REG_REG(dfiupd1, false),
	DDRCTL_REG_REG(dfiupd2, false),
	DDRCTL_REG_REG(dfimisc, true),
	DDRCTL_REG_REG(dfitmg2, true),
	DDRCTL_REG_REG(dfitmg3, false),
	DDRCTL_REG_REG(dbictl, true),
	DDRCTL_REG_REG(dfiphymstr, false),
	DDRCTL_REG_REG(dbg0, false),
	DDRCTL_REG_REG(dbg1, false),
	DDRCTL_REG_REG(dbgcmd, false),
	DDRCTL_REG_REG(swctl, false), /* forced qd value */
	DDRCTL_REG_REG(swctlstatic, false),
	DDRCTL_REG_REG(poisoncfg, false),
	DDRCTL_REG_REG(pccfg, false),
};

#define DDRCTL_REG_TIMING(x, z)	DDRCTL_REG(x, stm32mp2_ddrctrl_timing, z)
static const struct stm32mp_ddr_reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg, false),
	DDRCTL_REG_TIMING(rfshtmg1, false),
	DDRCTL_REG_TIMING(dramtmg0, true),
	DDRCTL_REG_TIMING(dramtmg1, true),
	DDRCTL_REG_TIMING(dramtmg2, true),
	DDRCTL_REG_TIMING(dramtmg3, true),
	DDRCTL_REG_TIMING(dramtmg4, true),
	DDRCTL_REG_TIMING(dramtmg5, true),
	DDRCTL_REG_TIMING(dramtmg6, true),
	DDRCTL_REG_TIMING(dramtmg7, true),
	DDRCTL_REG_TIMING(dramtmg8, true),
	DDRCTL_REG_TIMING(dramtmg9, true),
	DDRCTL_REG_TIMING(dramtmg10, true),
	DDRCTL_REG_TIMING(dramtmg11, true),
	DDRCTL_REG_TIMING(dramtmg12, true),
	DDRCTL_REG_TIMING(dramtmg13, true),
	DDRCTL_REG_TIMING(dramtmg14, true),
	DDRCTL_REG_TIMING(dramtmg15, true),
	DDRCTL_REG_TIMING(odtcfg, true),
	DDRCTL_REG_TIMING(odtmap, false),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp2_ddrctrl_map, false)
static const struct stm32mp_ddr_reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap0),
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap7),
	DDRCTL_REG_MAP(addrmap8),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x, z)	DDRCTL_REG(x, stm32mp2_ddrctrl_perf, z)
static const struct stm32mp_ddr_reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched, true),
	DDRCTL_REG_PERF(sched1, false),
	DDRCTL_REG_PERF(perfhpr1, true),
	DDRCTL_REG_PERF(perflpr1, true),
	DDRCTL_REG_PERF(perfwr1, true),
	DDRCTL_REG_PERF(sched3, false),
	DDRCTL_REG_PERF(sched4, false),
	DDRCTL_REG_PERF(pcfgr_0, false),
	DDRCTL_REG_PERF(pcfgw_0, false),
	DDRCTL_REG_PERF(pctrl_0, false),
	DDRCTL_REG_PERF(pcfgqos0_0, true),
	DDRCTL_REG_PERF(pcfgqos1_0, true),
	DDRCTL_REG_PERF(pcfgwqos0_0, true),
	DDRCTL_REG_PERF(pcfgwqos1_0, true),
#if STM32MP_DDR_DUAL_AXI_PORT
	DDRCTL_REG_PERF(pcfgr_1, false),
	DDRCTL_REG_PERF(pcfgw_1, false),
	DDRCTL_REG_PERF(pctrl_1, false),
	DDRCTL_REG_PERF(pcfgqos0_1, true),
	DDRCTL_REG_PERF(pcfgqos1_1, true),
	DDRCTL_REG_PERF(pcfgwqos0_1, true),
	DDRCTL_REG_PERF(pcfgwqos1_1, true),
#endif /* STM32MP_DDR_DUAL_AXI_PORT */
};

static const struct stm32mp_ddr_reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {};

static const struct stm32mp_ddr_reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {};

/*
 * REGISTERS ARRAY: used when parsing the device tree and in interactive mode
 */
static const struct stm32mp_ddr_reg_info ddr_registers[REG_TYPE_NB] __unused = {
	[REG_REG] = {
		.name = "static",
		.desc = ddr_reg,
		.size = DDRCTL_REG_REG_SIZE,
		.base = DDR_BASE
	},
	[REG_TIMING] = {
		.name = "timing",
		.desc = ddr_timing,
		.size = DDRCTL_REG_TIMING_SIZE,
		.base = DDR_BASE
	},
	[REG_PERF] = {
		.name = "perf",
		.desc = ddr_perf,
		.size = DDRCTL_REG_PERF_SIZE,
		.base = DDR_BASE
	},
	[REG_MAP] = {
		.name = "map",
		.desc = ddr_map,
		.size = DDRCTL_REG_MAP_SIZE,
		.base = DDR_BASE
	},
	[REGPHY_REG] = {
		.name = "static",
		.desc = ddrphy_reg,
		.size = DDRPHY_REG_REG_SIZE,
		.base = DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		.name = "timing",
		.desc = ddrphy_timing,
		.size = DDRPHY_REG_TIMING_SIZE,
		.base = DDRPHY_BASE
	},
};

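/*
 * Reset the DDR subsystem: assert the DDR interface reset and cycle the
 * PHY APB, controller APB and DDR configuration resets, keeping their
 * clocks enabled, with a settling delay between each step.
 */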
static void ddr_reset(struct stm32mp_ddr_priv *priv)
{
	udelay(DDR_DELAY_1US);

	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(priv->rcc + RCC_DDRPHYCAPBCFGR,
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBEN | RCC_DDRPHYCAPBCFGR_DDRPHYCAPBLPEN |
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCAPBCFGR,
		      RCC_DDRCAPBCFGR_DDRCAPBEN | RCC_DDRCAPBCFGR_DDRCAPBLPEN |
		      RCC_DDRCAPBCFGR_DDRCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCFGR,
		      RCC_DDRCFGR_DDRCFGEN | RCC_DDRCFGR_DDRCFGLPEN | RCC_DDRCFGR_DDRCFGRST);

	udelay(DDR_DELAY_1US);

	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(priv->rcc + RCC_DDRPHYCAPBCFGR,
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBEN | RCC_DDRPHYCAPBCFGR_DDRPHYCAPBLPEN);
	mmio_write_32(priv->rcc + RCC_DDRCAPBCFGR,
		      RCC_DDRCAPBCFGR_DDRCAPBEN | RCC_DDRCAPBCFGR_DDRCAPBLPEN);
	mmio_write_32(priv->rcc + RCC_DDRCFGR, RCC_DDRCFGR_DDRCFGEN | RCC_DDRCFGR_DDRCFGLPEN);

	udelay(DDR_DELAY_1US);
}

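/*
 * Reset sequence applied on self-refresh (standby) exit: assert the DDRCP,
 * DDR interface, PHY APB and controller APB resets, clear the DDRPHYDLP
 * control and enable the DDR PHY controller clock.
 */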
static void ddr_standby_reset(struct stm32mp_ddr_priv *priv)
{
	udelay(DDR_DELAY_1US);

	mmio_write_32(priv->rcc + RCC_DDRCPCFGR,
		      RCC_DDRCPCFGR_DDRCPEN | RCC_DDRCPCFGR_DDRCPLPEN | RCC_DDRCPCFGR_DDRCPRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_write_32(priv->rcc + RCC_DDRPHYCAPBCFGR,
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBEN | RCC_DDRPHYCAPBCFGR_DDRPHYCAPBLPEN |
		      RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCAPBCFGR,
		      RCC_DDRCAPBCFGR_DDRCAPBEN | RCC_DDRCAPBCFGR_DDRCAPBLPEN |
		      RCC_DDRCAPBCFGR_DDRCAPBRST);

	mmio_clrbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRPHYDLP);
	mmio_setbits_32(priv->rcc + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);

	udelay(DDR_DELAY_1US);
}

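/*
 * Release the resets asserted by ddr_standby_reset(): DDRCP, DDR interface
 * and PHY APB resets, and re-enable the DDR configuration interface clock.
 */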
static void ddr_standby_reset_release(struct stm32mp_ddr_priv *priv)
{
	udelay(DDR_DELAY_1US);

	mmio_write_32(priv->rcc + RCC_DDRCPCFGR, RCC_DDRCPCFGR_DDRCPEN | RCC_DDRCPCFGR_DDRCPLPEN);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRPHYCAPBCFGR, RCC_DDRPHYCAPBCFGR_DDRPHYCAPBRST);
	mmio_write_32(priv->rcc + RCC_DDRCFGR, RCC_DDRCFGR_DDRCFGEN | RCC_DDRCFGR_DDRCFGLPEN);

	udelay(DDR_DELAY_1US);
}

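/*
 * DDR system configuration: disable the XPI and DDRC low-power interfaces,
 * program the PLL bypass setting from the user configuration, enable the
 * DDR PHY controller clock and assert the DDR interface reset.
 */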
static void ddr_sysconf_configuration(struct stm32mp_ddr_priv *priv,
				      struct stm32mp_ddr_config *config)
{
	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_LP_DISABLE,
		      DDRDBG_LP_DISABLE_LPI_XPI_DISABLE | DDRDBG_LP_DISABLE_LPI_DDRC_DISABLE);

	mmio_write_32(stm32_ddrdbg_get_base() + DDRDBG_BYPASS_PCLKEN,
		      (uint32_t)config->uib.pllbypass);

	mmio_write_32(priv->rcc + RCC_DDRPHYCCFGR, RCC_DDRPHYCCFGR_DDRPHYCEN);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);

	udelay(DDR_DELAY_1US);
}

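/*
 * Set or clear dfimisc.dfi_init_complete_en, under the quasi-dynamic group 3
 * update conditions, depending on whether the PHY initialization is done.
 */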
static void set_dfi_init_complete_en(struct stm32mp_ddrctl *ctl, bool phy_init_done)
{
	/*
	 * Manage quasi-dynamic register modification:
	 * dfimisc.dfi_init_complete_en belongs to Group 3.
	 */
	stm32mp_ddr_set_qd3_update_conditions(ctl);

	udelay(DDR_DELAY_1US);

	if (phy_init_done) {
		/* Indicates to controller that PHY has completed initialization */
		mmio_setbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	} else {
		/* PHY not initialized yet, wait for completion */
		mmio_clrbits_32((uintptr_t)&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	}

	udelay(DDR_DELAY_1US);

	stm32mp_ddr_unset_qd3_update_conditions(ctl);
}

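/*
 * Disable auto-refresh, power-down and self-refresh, then clear
 * dfi_init_complete_en so the controller waits for PHY initialization.
 */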
static void disable_refresh(struct stm32mp_ddrctl *ctl)
{
	mmio_setbits_32((uintptr_t)&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);

	stm32mp_ddr_wait_refresh_update_done_ack(ctl);

	udelay(DDR_DELAY_1US);

	mmio_clrbits_32((uintptr_t)&ctl->pwrctl,
			DDRCTRL_PWRCTL_POWERDOWN_EN | DDRCTRL_PWRCTL_SELFREF_EN);

	udelay(DDR_DELAY_1US);

	set_dfi_init_complete_en(ctl, false);
}

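/*
 * Restore the refresh and low-power controls from the saved rfshctl3/pwrctl
 * values, then signal DFI initialization completion to the controller.
 */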
static void restore_refresh(struct stm32mp_ddrctl *ctl, uint32_t rfshctl3, uint32_t pwrctl)
{
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);

		stm32mp_ddr_wait_refresh_update_done_ack(ctl);

		udelay(DDR_DELAY_1US);
	}

	if ((pwrctl & DDRCTRL_PWRCTL_SELFREF_SW) != 0U) {
		mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_SW);

		udelay(DDR_DELAY_1US);
	}

	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);

		udelay(DDR_DELAY_1US);
	}

	if ((pwrctl & DDRCTRL_PWRCTL_SELFREF_EN) != 0U) {
		mmio_setbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_EN);

		udelay(DDR_DELAY_1US);
	}

	set_dfi_init_complete_en(ctl, true);
}

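/*
 * Initialize the DDR subsystem: identify the DDR type, handle either the
 * self-refresh (standby exit) or the cold-boot path, program the controller
 * registers, run the PHY init sequence and finally enable the AXI port(s).
 */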
void stm32mp2_ddr_init(struct stm32mp_ddr_priv *priv,
		       struct stm32mp_ddr_config *config)
{
	int ret = -EINVAL;
	uint32_t ddr_retdis;
	enum ddr_type ddr_type;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ddr_type = STM32MP_DDR3;
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR4) != 0U) {
		ddr_type = STM32MP_DDR4;
	} else if ((config->c_reg.mstr & DDRCTRL_MSTR_LPDDR4) != 0U) {
		ddr_type = STM32MP_LPDDR4;
	} else {
		ERROR("DDR type not supported\n");
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %u kHz\n", config->info.speed);
	VERBOSE("size = 0x%zx\n", config->info.size);
	if (config->self_refresh) {
		VERBOSE("self-refresh exit (zdata = 0x%x)\n", config->zdata);
	}

	/* Check DDR PHY pads retention */
	ddr_retdis = mmio_read_32(priv->pwr + PWR_CR11) & PWR_CR11_DDRRETDIS;
	if (config->self_refresh) {
		if (ddr_retdis == PWR_CR11_DDRRETDIS) {
			VERBOSE("self-refresh aborted: no retention\n");
			config->self_refresh = false;
		}
	}

	if (config->self_refresh) {
		ddr_standby_reset(priv);

		VERBOSE("disable DDR PHY retention\n");
		mmio_setbits_32(priv->pwr + PWR_CR11, PWR_CR11_DDRRETDIS);

		udelay(DDR_DELAY_1US);

		mmio_clrbits_32(priv->rcc + RCC_DDRCAPBCFGR, RCC_DDRCAPBCFGR_DDRCAPBRST);

		udelay(DDR_DELAY_1US);
	} else {
		if (stm32mp_board_ddr_power_init(ddr_type) != 0) {
			ERROR("DDR power init failed\n");
			panic();
		}

		VERBOSE("disable DDR PHY retention\n");
		mmio_setbits_32(priv->pwr + PWR_CR11, PWR_CR11_DDRRETDIS);

		ddr_reset(priv);

		ddr_sysconf_configuration(priv, config);
	}

#if STM32MP_LPDDR4_TYPE
	/*
	 * Enable PWRCTL.SELFREF_SW to ensure correct setting of PWRCTL.LPDDR4_SR_ALLOWED.
	 * Later disabled in restore_refresh().
	 */
	config->c_reg.pwrctl |= DDRCTRL_PWRCTL_SELFREF_SW;
#endif /* STM32MP_LPDDR4_TYPE */

	stm32mp_ddr_set_reg(priv, REG_REG, &config->c_reg, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_TIMING, &config->c_timing, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_MAP, &config->c_map, ddr_registers);
	stm32mp_ddr_set_reg(priv, REG_PERF, &config->c_perf, ddr_registers);

	if (!config->self_refresh) {
		/* DDR core and PHY reset de-assert */
		mmio_clrbits_32(priv->rcc + RCC_DDRITFCFGR, RCC_DDRITFCFGR_DDRRST);

		disable_refresh(priv->ctl);
	}

	if (config->self_refresh) {
		ddr_standby_reset_release(priv);

		/* Initialize DDR by skipping training and disabling result saving */
		ret = ddrphy_phyinit_sequence(config, true, false);

		if (ret == 0) {
			ret = ddrphy_phyinit_restore_sequence();
		}

		/* Poll on ddrphy_initeng0_phyinlpx.phyinlp3 = 0 */
		ddr_wait_lp3_mode(false);
	} else {
		/* Initialize DDR including training and result saving */
		ret = ddrphy_phyinit_sequence(config, false, true);
	}

	if (ret != 0) {
		ERROR("DDR PHY init: Error %d\n", ret);
		panic();
	}

	ddr_activate_controller(priv->ctl, false);

	if (config->self_refresh) {
		struct stm32mp_ddrctl *ctl = priv->ctl;

		/* SW self-refresh exit requested */
		mmio_clrbits_32((uintptr_t)&ctl->pwrctl, DDRCTRL_PWRCTL_SELFREF_SW);

		if (ddr_sr_exit_loop() != 0) {
			ERROR("DDR Standby exit error\n");
			panic();
		}

		/* Re-enable DFI low-power interface */
		mmio_clrbits_32((uintptr_t)&ctl->dfilpcfg0, DDRCTRL_DFILPCFG0_DFI_LP_EN_SR);
	} else {
		restore_refresh(priv->ctl, config->c_reg.rfshctl3, config->c_reg.pwrctl);
	}

	stm32mp_ddr_enable_axi_port(priv->ctl);
}