blob: eed1d7613d4a4d6e43239f9c2b76c2033d570214 [file] [log] [blame]
Yann Gautiercaf575b2018-07-24 17:18:19 +02001/*
2 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
3 *
4 * SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
5 */
6
7#include <arch.h>
8#include <arch_helpers.h>
9#include <debug.h>
10#include <delay_timer.h>
11#include <dt-bindings/clock/stm32mp1-clks.h>
12#include <mmio.h>
13#include <platform.h>
14#include <stddef.h>
15#include <stm32mp1_clk.h>
16#include <stm32mp1_ddr.h>
17#include <stm32mp1_ddr_regs.h>
18#include <stm32mp1_dt.h>
19#include <stm32mp1_pmic.h>
20#include <stm32mp1_pwr.h>
21#include <stm32mp1_ram.h>
22#include <stm32mp1_rcc.h>
23
/*
 * Descriptor tying one hardware register to its image in a
 * configuration parameter structure.
 */
struct reg_desc {
	const char *name;	/* Register name, used in traces */
	uint16_t offset;	/* Offset for base address */
	uint8_t par_offset;	/* Offset for parameter array */
};
29
/* par_offset value for registers with no image in the parameter structures */
#define INVALID_OFFSET	0xFFU

/* Number of system counter ticks per microsecond */
#define TIMESLOT_1US	(plat_get_syscnt_freq2() / 1000000U)

/* Build a reg_desc for controller register x, parameter in struct y */
#define DDRCTL_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrctl, x),	\
		.par_offset = offsetof(struct y, x)		\
	}

/* Build a reg_desc for PHY register x, parameter in struct y */
#define DDRPHY_REG(x, y)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrphy, x),	\
		.par_offset = offsetof(struct y, x)		\
	}
47
#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
/* Controller static registers, filled from struct stm32mp1_ddrctrl_reg */
static const struct reg_desc ddr_reg[] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};
76
#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
/* Controller timing registers, filled from struct stm32mp1_ddrctrl_timing */
static const struct reg_desc ddr_timing[] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};
92
#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
/* Controller address-map registers, filled from struct stm32mp1_ddrctrl_map */
static const struct reg_desc ddr_map[] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};
105
#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
/*
 * Controller performance and AXI port registers (ports 0 and 1),
 * filled from struct stm32mp1_ddrctrl_perf.
 */
static const struct reg_desc ddr_perf[] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};
126
#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
/* PHY static registers, filled from struct stm32mp1_ddrphy_reg */
static const struct reg_desc ddrphy_reg[] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};
141
#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
/* PHY timing registers, filled from struct stm32mp1_ddrphy_timing */
static const struct reg_desc ddrphy_timing[] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};
155
#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)
/*
 * PHY calibration registers for the four byte lanes (DX0..DX3),
 * filled from struct stm32mp1_ddrphy_cal.
 */
static const struct reg_desc ddrphy_cal[] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};
171
/* Controller register managed by the driver itself: no parameter image */
#define DDR_REG_DYN(x)						\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrctl, x),	\
		.par_offset = INVALID_OFFSET			\
	}

/* Controller dynamic registers (never written through set_reg()) */
static const struct reg_desc ddr_dyn[] = {
	DDR_REG_DYN(stat),
	DDR_REG_DYN(init0),
	DDR_REG_DYN(dfimisc),
	DDR_REG_DYN(dfistat),
	DDR_REG_DYN(swctl),
	DDR_REG_DYN(swstat),
	DDR_REG_DYN(pctrl_0),
	DDR_REG_DYN(pctrl_1),
};
189
/* PHY register managed by the driver itself: no parameter image */
#define DDRPHY_REG_DYN(x)					\
	{							\
		.name = #x,					\
		.offset = offsetof(struct stm32mp1_ddrphy, x),	\
		.par_offset = INVALID_OFFSET			\
	}

/* PHY dynamic registers (never written through set_reg()) */
static const struct reg_desc ddrphy_dyn[] = {
	DDRPHY_REG_DYN(pir),
	DDRPHY_REG_DYN(pgsr),
};
201
/* Register families, indexing the ddr_registers[] description table */
enum reg_type {
	REG_REG,	/* Controller static registers */
	REG_TIMING,	/* Controller timing registers */
	REG_PERF,	/* Controller perf/AXI port registers */
	REG_MAP,	/* Controller address-map registers */
	REGPHY_REG,	/* PHY static registers */
	REGPHY_TIMING,	/* PHY timing registers */
	REGPHY_CAL,	/* PHY calibration registers */
/*
 * Dynamic registers => managed in driver or not changed,
 * can be dumped in interactive mode.
 */
	REG_DYN,	/* Controller dynamic registers */
	REGPHY_DYN,	/* PHY dynamic registers */
	REG_TYPE_NB	/* Number of register families */
};
218
/* Which MMIO block a register family belongs to */
enum base_type {
	DDR_BASE,	/* DDR controller (uMCTL2) block */
	DDRPHY_BASE,	/* DDR PHY (PUBL) block */
	NONE_BASE
};
224
/* Description of one register family: its descriptors and target block */
struct ddr_reg_info {
	const char *name;		/* Family name, used in traces */
	const struct reg_desc *desc;	/* Array of register descriptors */
	uint8_t size;			/* Number of entries in desc[] */
	enum base_type base;		/* MMIO block the registers live in */
};
231
/* Per-family register descriptions, indexed by enum reg_type */
static const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
	[REG_REG] = {
		"static", ddr_reg, ARRAY_SIZE(ddr_reg), DDR_BASE
	},
	[REG_TIMING] = {
		"timing", ddr_timing, ARRAY_SIZE(ddr_timing), DDR_BASE
	},
	[REG_PERF] = {
		"perf", ddr_perf, ARRAY_SIZE(ddr_perf), DDR_BASE
	},
	[REG_MAP] = {
		"map", ddr_map, ARRAY_SIZE(ddr_map), DDR_BASE
	},
	[REGPHY_REG] = {
		"static", ddrphy_reg, ARRAY_SIZE(ddrphy_reg), DDRPHY_BASE
	},
	[REGPHY_TIMING] = {
		"timing", ddrphy_timing, ARRAY_SIZE(ddrphy_timing), DDRPHY_BASE
	},
	[REGPHY_CAL] = {
		"cal", ddrphy_cal, ARRAY_SIZE(ddrphy_cal), DDRPHY_BASE
	},
	[REG_DYN] = {
		"dyn", ddr_dyn, ARRAY_SIZE(ddr_dyn), DDR_BASE
	},
	[REGPHY_DYN] = {
		"dyn", ddrphy_dyn, ARRAY_SIZE(ddrphy_dyn), DDRPHY_BASE
	},
};
261
262static uint32_t get_base_addr(const struct ddr_info *priv, enum base_type base)
263{
264 if (base == DDRPHY_BASE) {
265 return (uint32_t)priv->phy;
266 } else {
267 return (uint32_t)priv->ctl;
268 }
269}
270
271static void set_reg(const struct ddr_info *priv,
272 enum reg_type type,
273 const void *param)
274{
275 unsigned int i;
276 unsigned int *ptr, value;
277 enum base_type base = ddr_registers[type].base;
278 uint32_t base_addr = get_base_addr(priv, base);
279 const struct reg_desc *desc = ddr_registers[type].desc;
280
281 VERBOSE("init %s\n", ddr_registers[type].name);
282 for (i = 0; i < ddr_registers[type].size; i++) {
283 ptr = (unsigned int *)(base_addr + desc[i].offset);
284 if (desc[i].par_offset == INVALID_OFFSET) {
285 ERROR("invalid parameter offset for %s", desc[i].name);
286 panic();
287 } else {
288 value = *((uint32_t *)((uint32_t)param +
289 desc[i].par_offset));
290 mmio_write_32((uint32_t)ptr, value);
291 }
292 }
293}
294
295static void stm32mp1_ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
296{
297 uint32_t pgsr;
298 int error = 0;
299 unsigned long start;
300 unsigned long time0, time;
301
302 start = get_timer(0);
303 time0 = start;
304
305 do {
306 pgsr = mmio_read_32((uint32_t)&phy->pgsr);
307 time = get_timer(start);
308 if (time != time0) {
309 VERBOSE(" > [0x%x] pgsr = 0x%x &\n",
310 (uint32_t)&phy->pgsr, pgsr);
311 VERBOSE(" [0x%x] pir = 0x%x (time=%x)\n",
312 (uint32_t)&phy->pir,
313 mmio_read_32((uint32_t)&phy->pir),
314 (uint32_t)time);
315 }
316
317 time0 = time;
318 if (time > plat_get_syscnt_freq2()) {
319 panic();
320 }
321 if ((pgsr & DDRPHYC_PGSR_DTERR) != 0U) {
322 VERBOSE("DQS Gate Trainig Error\n");
323 error++;
324 }
325 if ((pgsr & DDRPHYC_PGSR_DTIERR) != 0U) {
326 VERBOSE("DQS Gate Trainig Intermittent Error\n");
327 error++;
328 }
329 if ((pgsr & DDRPHYC_PGSR_DFTERR) != 0U) {
330 VERBOSE("DQS Drift Error\n");
331 error++;
332 }
333 if ((pgsr & DDRPHYC_PGSR_RVERR) != 0U) {
334 VERBOSE("Read Valid Training Error\n");
335 error++;
336 }
337 if ((pgsr & DDRPHYC_PGSR_RVEIRR) != 0U) {
338 VERBOSE("Read Valid Training Intermittent Error\n");
339 error++;
340 }
341 } while ((pgsr & DDRPHYC_PGSR_IDONE) == 0U && error == 0);
342 VERBOSE("\n[0x%x] pgsr = 0x%x\n",
343 (uint32_t)&phy->pgsr, pgsr);
344}
345
346static void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, uint32_t pir)
347{
348 uint32_t pir_init = pir | DDRPHYC_PIR_INIT;
349
350 mmio_write_32((uint32_t)&phy->pir, pir_init);
351 VERBOSE("[0x%x] pir = 0x%x -> 0x%x\n",
352 (uint32_t)&phy->pir, pir_init,
353 mmio_read_32((uint32_t)&phy->pir));
354
355 /* Need to wait 10 configuration clock before start polling */
356 udelay(10);
357
358 /* Wait DRAM initialization and Gate Training Evaluation complete */
359 stm32mp1_ddrphy_idone_wait(phy);
360}
361
/* Start quasi-dynamic register update: clear SWCTL.sw_done */
static void stm32mp1_start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	mmio_clrbits_32((uint32_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%x] swctl = 0x%x\n",
		(uint32_t)&ctl->swctl, mmio_read_32((uint32_t)&ctl->swctl));
}
369
/*
 * End quasi-dynamic register update: set SWCTL.sw_done, then poll
 * SWSTAT.sw_done_ack.  Panics after one system counter period.
 */
static void stm32mp1_wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	unsigned long start;
	uint32_t swstat;

	mmio_setbits_32((uint32_t)&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
	VERBOSE("[0x%x] swctl = 0x%x\n",
		(uint32_t)&ctl->swctl, mmio_read_32((uint32_t)&ctl->swctl));

	start = get_timer(0);
	do {
		swstat = mmio_read_32((uint32_t)&ctl->swstat);
		VERBOSE("[0x%x] swstat = 0x%x ",
			(uint32_t)&ctl->swstat, swstat);
		/*
		 * NOTE(review): "%x" paired with get_timer(0) — confirm
		 * get_timer() return width matches (start uses %lx).
		 */
		VERBOSE("timer in ms 0x%x = start 0x%lx\r",
			get_timer(0), start);
		if (get_timer(start) > plat_get_syscnt_freq2()) {
			panic();
		}
	} while ((swstat & DDRCTRL_SWSTAT_SW_DONE_ACK) == 0U);

	VERBOSE("[0x%x] swstat = 0x%x\n",
		(uint32_t)&ctl->swstat, swstat);
}
395
/*
 * Poll STAT.operating_mode until the controller reaches @mode.
 * For self-refresh, STAT.selfref_type is also checked so only a
 * software-initiated entry qualifies; when waiting for normal mode,
 * automatic self-refresh is accepted as equivalent.
 * Panics after one system counter period.
 */
static void stm32mp1_wait_operating_mode(struct ddr_info *priv, uint32_t mode)
{
	unsigned long start;
	uint32_t stat;
	uint32_t operating_mode;
	uint32_t selref_type;
	int break_loop = 0;

	start = get_timer(0);
	for ( ; ; ) {
		stat = mmio_read_32((uint32_t)&priv->ctl->stat);
		operating_mode = stat & DDRCTRL_STAT_OPERATING_MODE_MASK;
		selref_type = stat & DDRCTRL_STAT_SELFREF_TYPE_MASK;
		VERBOSE("[0x%x] stat = 0x%x\n",
			(uint32_t)&priv->ctl->stat, stat);
		VERBOSE("timer in ms 0x%x = start 0x%lx\r",
			get_timer(0), start);
		if (get_timer(start) > plat_get_syscnt_freq2()) {
			panic();
		}

		if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
			/*
			 * Self-refresh due to software
			 * => checking also STAT.selfref_type.
			 */
			if ((operating_mode ==
			     DDRCTRL_STAT_OPERATING_MODE_SR) &&
			    (selref_type == DDRCTRL_STAT_SELFREF_TYPE_SR)) {
				break_loop = 1;
			}
		} else if (operating_mode == mode) {
			break_loop = 1;
		} else if ((mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) &&
			   (operating_mode == DDRCTRL_STAT_OPERATING_MODE_SR) &&
			   (selref_type == DDRCTRL_STAT_SELFREF_TYPE_ASR)) {
			/* Normal mode: handle also automatic self refresh */
			break_loop = 1;
		}

		if (break_loop == 1) {
			break;
		}
	}

	VERBOSE("[0x%x] stat = 0x%x\n",
		(uint32_t)&priv->ctl->stat, stat);
}
445
/*
 * Mode Register Writes (MRW or MRS): program SDRAM mode register @addr
 * with @data via MRCTRL0/MRCTRL1, following the uMCTL2 three-step
 * procedure.  Busy-waits (no timeout) on MRSTAT.mr_wr_busy before and
 * after triggering the transaction.
 */
static void stm32mp1_mode_register_write(struct ddr_info *priv, uint8_t addr,
					 uint32_t data)
{
	uint32_t mrctrl0;

	VERBOSE("MRS: %d = %x\n", addr, data);

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is '0'.
	 *    This checks that there is no outstanding MR transaction.
	 *    No write should be performed to MRCTRL0 and MRCTRL1
	 *    if MRSTAT.mr_wr_busy = 1.
	 */
	while ((mmio_read_32((uint32_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 *    and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
	mrctrl0 = DDRCTRL_MRCTRL0_MR_TYPE_WRITE |
		  DDRCTRL_MRCTRL0_MR_RANK_ALL |
		  (((uint32_t)addr << DDRCTRL_MRCTRL0_MR_ADDR_SHIFT) &
		   DDRCTRL_MRCTRL0_MR_ADDR_MASK);
	mmio_write_32((uint32_t)&priv->ctl->mrctrl0, mrctrl0);
	VERBOSE("[0x%x] mrctrl0 = 0x%x (0x%x)\n",
		(uint32_t)&priv->ctl->mrctrl0,
		mmio_read_32((uint32_t)&priv->ctl->mrctrl0), mrctrl0);
	mmio_write_32((uint32_t)&priv->ctl->mrctrl1, data);
	VERBOSE("[0x%x] mrctrl1 = 0x%x\n",
		(uint32_t)&priv->ctl->mrctrl1,
		mmio_read_32((uint32_t)&priv->ctl->mrctrl1));

	/*
	 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1. This
	 *    bit is self-clearing, and triggers the MR transaction.
	 *    The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
	 *    the MR transaction to SDRAM, and no further access can be
	 *    initiated until it is deasserted.
	 */
	mrctrl0 |= DDRCTRL_MRCTRL0_MR_WR;
	mmio_write_32((uint32_t)&priv->ctl->mrctrl0, mrctrl0);

	while ((mmio_read_32((uint32_t)&priv->ctl->mrstat) &
		DDRCTRL_MRSTAT_MR_WR_BUSY) != 0U) {
		;
	}

	VERBOSE("[0x%x] mrctrl0 = 0x%x\n",
		(uint32_t)&priv->ctl->mrctrl0, mrctrl0);
}
500
501/* Switch DDR3 from DLL-on to DLL-off */
502static void stm32mp1_ddr3_dll_off(struct ddr_info *priv)
503{
504 uint32_t mr1 = mmio_read_32((uint32_t)&priv->phy->mr1);
505 uint32_t mr2 = mmio_read_32((uint32_t)&priv->phy->mr2);
506 uint32_t dbgcam;
507
508 VERBOSE("mr1: 0x%x\n", mr1);
509 VERBOSE("mr2: 0x%x\n", mr2);
510
511 /*
512 * 1. Set the DBG1.dis_hif = 1.
513 * This prevents further reads/writes being received on the HIF.
514 */
515 mmio_setbits_32((uint32_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
516 VERBOSE("[0x%x] dbg1 = 0x%x\n",
517 (uint32_t)&priv->ctl->dbg1,
518 mmio_read_32((uint32_t)&priv->ctl->dbg1));
519
520 /*
521 * 2. Ensure all commands have been flushed from the uMCTL2 by polling
522 * DBGCAM.wr_data_pipeline_empty = 1,
523 * DBGCAM.rd_data_pipeline_empty = 1,
524 * DBGCAM.dbg_wr_q_depth = 0 ,
525 * DBGCAM.dbg_lpr_q_depth = 0, and
526 * DBGCAM.dbg_hpr_q_depth = 0.
527 */
528 do {
529 dbgcam = mmio_read_32((uint32_t)&priv->ctl->dbgcam);
530 VERBOSE("[0x%x] dbgcam = 0x%x\n",
531 (uint32_t)&priv->ctl->dbgcam, dbgcam);
532 } while ((((dbgcam & DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY) ==
533 DDRCTRL_DBGCAM_DATA_PIPELINE_EMPTY)) &&
534 ((dbgcam & DDRCTRL_DBGCAM_DBG_Q_DEPTH) == 0U));
535
536 /*
537 * 3. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
538 * to disable RTT_NOM:
539 * a. DDR3: Write to MR1[9], MR1[6] and MR1[2]
540 * b. DDR4: Write to MR1[10:8]
541 */
542 mr1 &= ~(BIT(9) | BIT(6) | BIT(2));
543 stm32mp1_mode_register_write(priv, 1, mr1);
544
545 /*
546 * 4. For DDR4 only: Perform an MRS command
547 * (using MRCTRL0 and MRCTRL1 registers) to write to MR5[8:6]
548 * to disable RTT_PARK
549 */
550
551 /*
552 * 5. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
553 * to write to MR2[10:9], to disable RTT_WR
554 * (and therefore disable dynamic ODT).
555 * This applies for both DDR3 and DDR4.
556 */
557 mr2 &= ~GENMASK(10, 9);
558 stm32mp1_mode_register_write(priv, 2, mr2);
559
560 /*
561 * 6. Perform an MRS command (using MRCTRL0 and MRCTRL1 registers)
562 * to disable the DLL. The timing of this MRS is automatically
563 * handled by the uMCTL2.
564 * a. DDR3: Write to MR1[0]
565 * b. DDR4: Write to MR1[0]
566 */
567 mr1 |= BIT(0);
568 stm32mp1_mode_register_write(priv, 1, mr1);
569
570 /*
571 * 7. Put the SDRAM into self-refresh mode by setting
572 * PWRCTL.selfref_sw = 1, and polling STAT.operating_mode to ensure
573 * the DDRC has entered self-refresh.
574 */
575 mmio_setbits_32((uint32_t)&priv->ctl->pwrctl,
576 DDRCTRL_PWRCTL_SELFREF_SW);
577 VERBOSE("[0x%x] pwrctl = 0x%x\n",
578 (uint32_t)&priv->ctl->pwrctl,
579 mmio_read_32((uint32_t)&priv->ctl->pwrctl));
580
581 /*
582 * 8. Wait until STAT.operating_mode[1:0]==11 indicating that the
583 * DWC_ddr_umctl2 core is in self-refresh mode.
584 * Ensure transition to self-refresh was due to software
585 * by checking that STAT.selfref_type[1:0]=2.
586 */
587 stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_SR);
588
589 /*
590 * 9. Set the MSTR.dll_off_mode = 1.
591 * warning: MSTR.dll_off_mode is a quasi-dynamic type 2 field
592 */
593 stm32mp1_start_sw_done(priv->ctl);
594
595 mmio_setbits_32((uint32_t)&priv->ctl->mstr, DDRCTRL_MSTR_DLL_OFF_MODE);
596 VERBOSE("[0x%x] mstr = 0x%x\n",
597 (uint32_t)&priv->ctl->mstr,
598 mmio_read_32((uint32_t)&priv->ctl->mstr));
599
600 stm32mp1_wait_sw_done_ack(priv->ctl);
601
602 /* 10. Change the clock frequency to the desired value. */
603
604 /*
605 * 11. Update any registers which may be required to change for the new
606 * frequency. This includes static and dynamic registers.
607 * This includes both uMCTL2 registers and PHY registers.
608 */
609
610 /* Change Bypass Mode Frequency Range */
611 if (stm32mp1_clk_get_rate(DDRPHYC) < 100000000U) {
612 mmio_clrbits_32((uint32_t)&priv->phy->dllgcr,
613 DDRPHYC_DLLGCR_BPS200);
614 } else {
615 mmio_setbits_32((uint32_t)&priv->phy->dllgcr,
616 DDRPHYC_DLLGCR_BPS200);
617 }
618
619 mmio_setbits_32((uint32_t)&priv->phy->acdllcr, DDRPHYC_ACDLLCR_DLLDIS);
620
621 mmio_setbits_32((uint32_t)&priv->phy->dx0dllcr,
622 DDRPHYC_DXNDLLCR_DLLDIS);
623 mmio_setbits_32((uint32_t)&priv->phy->dx1dllcr,
624 DDRPHYC_DXNDLLCR_DLLDIS);
625 mmio_setbits_32((uint32_t)&priv->phy->dx2dllcr,
626 DDRPHYC_DXNDLLCR_DLLDIS);
627 mmio_setbits_32((uint32_t)&priv->phy->dx3dllcr,
628 DDRPHYC_DXNDLLCR_DLLDIS);
629
630 /* 12. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
631 mmio_clrbits_32((uint32_t)&priv->ctl->pwrctl,
632 DDRCTRL_PWRCTL_SELFREF_SW);
633 stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);
634
635 /*
636 * 13. If ZQCTL0.dis_srx_zqcl = 0, the uMCTL2 performs a ZQCL command
637 * at this point.
638 */
639
640 /*
641 * 14. Perform MRS commands as required to re-program timing registers
642 * in the SDRAM for the new frequency
643 * (in particular, CL, CWL and WR may need to be changed).
644 */
645
646 /* 15. Write DBG1.dis_hif = 0 to re-enable reads and writes. */
647 mmio_clrbits_32((uint32_t)&priv->ctl->dbg1, DDRCTRL_DBG1_DIS_HIF);
648 VERBOSE("[0x%x] dbg1 = 0x%x\n",
649 (uint32_t)&priv->ctl->dbg1,
650 mmio_read_32((uint32_t)&priv->ctl->dbg1));
651}
652
/*
 * Disable auto-refresh, power-down and the DFI init-complete handshake
 * while PHY training runs (quasi-dynamic update under SW_DONE).
 */
static void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	stm32mp1_start_sw_done(ctl);
	/* Quasi-dynamic register update */
	mmio_setbits_32((uint32_t)&ctl->rfshctl3,
			DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	mmio_clrbits_32((uint32_t)&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	mmio_clrbits_32((uint32_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}
664
/*
 * Restore auto-refresh and power-down to the configuration values saved
 * before training (@rfshctl3/@pwrctl) and re-enable the DFI handshake.
 */
static void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
				     uint32_t rfshctl3, uint32_t pwrctl)
{
	stm32mp1_start_sw_done(ctl);
	if ((rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH) == 0U) {
		/* Auto-refresh was enabled in the configuration: restore it */
		mmio_clrbits_32((uint32_t)&ctl->rfshctl3,
				DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	}
	if ((pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN) != 0U) {
		/* Power-down was enabled in the configuration: restore it */
		mmio_setbits_32((uint32_t)&ctl->pwrctl,
				DDRCTRL_PWRCTL_POWERDOWN_EN);
	}
	mmio_setbits_32((uint32_t)&ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	stm32mp1_wait_sw_done_ack(ctl);
}
681
682static int board_ddr_power_init(enum ddr_type ddr_type)
683{
684 if (dt_check_pmic()) {
685 return pmic_ddr_power_init(ddr_type);
686 }
687
688 return 0;
689}
690
/*
 * Full DDR bring-up: power the rails, reset and clock the controller
 * and PHY, program the configuration register sets, run PHY DRAM
 * initialization and DQS training, then enable both AXI ports.
 * Panics on any failure (power, clock, timeouts inside helpers).
 */
void stm32mp1_ddr_init(struct ddr_info *priv,
		       struct stm32mp1_ddr_config *config)
{
	uint32_t pir;
	int ret;

	/* Select the supply scheme from the configured memory type */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		ret = board_ddr_power_init(STM32MP_DDR3);
	} else {
		ret = board_ddr_power_init(STM32MP_LPDDR2);
	}

	if (ret != 0) {
		panic();
	}

	VERBOSE("name = %s\n", config->info.name);
	VERBOSE("speed = %d MHz\n", config->info.speed);
	VERBOSE("size = 0x%x\n", config->info.size);

	/* DDR INIT SEQUENCE */

	/*
	 * 1. Program the DWC_ddr_umctl2 registers
	 * nota: check DFIMISC.dfi_init_complete = 0
	 */

	/* 1.1 RESETS: presetn, core_ddrc_rstn, aresetn */
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_setbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

	/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed) != 0) {
		panic();
	}

	/* 1.3. deassert reset */
	/* De-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST. */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/*
	 * De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit.
	 */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

	/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* For PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

	/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	mmio_clrbits_32((uint32_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%x] dfimisc = 0x%x\n",
		(uint32_t)&priv->ctl->dfimisc,
		mmio_read_32((uint32_t)&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mstr\n");
		mmio_clrbits_32((uint32_t)&priv->ctl->mstr,
				DDRCTRL_MSTR_DLL_OFF_MODE);
		VERBOSE("[0x%x] mstr = 0x%x\n",
			(uint32_t)&priv->ctl->mstr,
			mmio_read_32((uint32_t)&priv->ctl->mstr));
	}

	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* Skip CTRL init, SDRAM init is done by PHY PUBL */
	mmio_clrsetbits_32((uint32_t)&priv->ctl->init0,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			   DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);
	VERBOSE("[0x%x] init0 = 0x%x\n",
		(uint32_t)&priv->ctl->init0,
		mmio_read_32((uint32_t)&priv->ctl->init0));

	set_reg(priv, REG_PERF, &config->c_perf);

	/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	mmio_clrbits_32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

	/*
	 * 3. start PHY init by accessing relevant PUBL registers
	 *    (DXGCR, DCR, PTR*, MR*, DTPR*)
	 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

	/* DDR3 = don't set DLLOFF for init mode */
	if ((config->c_reg.mstr &
	     (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE))
	    == (DDRCTRL_MSTR_DDR3 | DDRCTRL_MSTR_DLL_OFF_MODE)) {
		VERBOSE("deactivate DLL OFF in mr1\n");
		mmio_clrbits_32((uint32_t)&priv->phy->mr1, BIT(0));
		VERBOSE("[0x%x] mr1 = 0x%x\n",
			(uint32_t)&priv->phy->mr1,
			mmio_read_32((uint32_t)&priv->phy->mr1));
	}

	/*
	 * 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
	 *    Perform DDR PHY DRAM initialization and Gate Training Evaluation
	 */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 5. Indicate to PUBL that controller performs SDRAM initialization
	 *    by setting PIR.INIT and PIR CTLDINIT and poll PGSR.IDONE
	 *    DRAM init is done by PHY, init0.skip_dram.init = 1
	 */

	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if ((config->c_reg.mstr & DDRCTRL_MSTR_DDR3) != 0U) {
		pir |= DDRPHYC_PIR_DRAMRST; /* Only for DDR3 */
	}

	stm32mp1_ddrphy_init(priv->phy, pir);

	/*
	 * 6. SET DFIMISC.dfi_init_complete_en to 1
	 *    Enable quasi-dynamic register programming.
	 */
	stm32mp1_start_sw_done(priv->ctl);

	mmio_setbits_32((uint32_t)&priv->ctl->dfimisc,
			DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	VERBOSE("[0x%x] dfimisc = 0x%x\n",
		(uint32_t)&priv->ctl->dfimisc,
		mmio_read_32((uint32_t)&priv->ctl->dfimisc));

	stm32mp1_wait_sw_done_ack(priv->ctl);

	/*
	 * 7. Wait for DWC_ddr_umctl2 to move to normal operation mode
	 *    by monitoring STAT.operating_mode signal
	 */

	/* Wait uMCTL2 ready */
	stm32mp1_wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	/* Switch to DLL OFF mode */
	if ((config->c_reg.mstr & DDRCTRL_MSTR_DLL_OFF_MODE) != 0U) {
		stm32mp1_ddr3_dll_off(priv);
	}

	VERBOSE("DDR DQS training : ");

	/*
	 * 8. Disable Auto refresh and power down by setting
	 *    - RFSHCTL3.dis_au_refresh = 1
	 *    - PWRCTL.powerdown_en = 0
	 *    - DFIMISC.dfiinit_complete_en = 0
	 */
	stm32mp1_refresh_disable(priv->ctl);

	/*
	 * 9. Program PUBL PGCR to enable refresh during training
	 *    and rank to train
	 *    not done => keep the programmed value in PGCR
	 */

	/*
	 * 10. configure PUBL PIR register to specify which training step
	 *     to run
	 *     Warning : RVTRN is not supported by this PUBL
	 */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

	/* 11. monitor PUB PGSR.IDONE to poll completion of training sequence */
	stm32mp1_ddrphy_idone_wait(priv->phy);

	/*
	 * 12. set back registers in step 8 to the original values if desired
	 */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* Enable uMCTL2 AXI port 0 */
	mmio_setbits_32((uint32_t)&priv->ctl->pctrl_0, DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%x] pctrl_0 = 0x%x\n",
		(uint32_t)&priv->ctl->pctrl_0,
		mmio_read_32((uint32_t)&priv->ctl->pctrl_0));

	/* Enable uMCTL2 AXI port 1 */
	mmio_setbits_32((uint32_t)&priv->ctl->pctrl_1, DDRCTRL_PCTRL_N_PORT_EN);
	VERBOSE("[0x%x] pctrl_1 = 0x%x\n",
		(uint32_t)&priv->ctl->pctrl_1,
		mmio_read_32((uint32_t)&priv->ctl->pctrl_1));
}
895}