// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * Copyright (C) 2018, STMicroelectronics - All Rights Reserved
 */

#include <common.h>
#include <clk.h>
#include <ram.h>
#include <reset.h>
#include <timer.h>
#include <asm/io.h>
#include <asm/arch/ddr.h>
#include <linux/iopoll.h>
#include "stm32mp1_ddr.h"
#include "stm32mp1_ddr_regs.h"

#define RCC_DDRITFCR 0xD8

#define RCC_DDRITFCR_DDRCAPBRST (BIT(14))
#define RCC_DDRITFCR_DDRCAXIRST (BIT(15))
#define RCC_DDRITFCR_DDRCORERST (BIT(16))
#define RCC_DDRITFCR_DPHYAPBRST (BIT(17))
#define RCC_DDRITFCR_DPHYRST (BIT(18))
#define RCC_DDRITFCR_DPHYCTLRST (BIT(19))

struct reg_desc {
	const char *name;
	u16 offset;	/* offset for base address */
	u8 par_offset;	/* offset for parameter array */
};

#define INVALID_OFFSET	0xFF

#define DDRCTL_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrctl, x),\
	 offsetof(struct y, x)}

#define DDRPHY_REG(x, y) \
	{#x,\
	 offsetof(struct stm32mp1_ddrphy, x),\
	 offsetof(struct y, x)}

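/*
 * Example of how the reg_desc tables below are built:
 * DDRCTL_REG_REG(mstr) expands to
 *	{ "mstr",
 *	  offsetof(struct stm32mp1_ddrctl, mstr),
 *	  offsetof(struct stm32mp1_ddrctrl_reg, mstr) }
 * i.e. each entry pairs a hardware register offset with the offset of the
 * matching field in the parameter structure filled from the device tree.
 */
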
/***********************************************************
 * PARAMETERS: values retrieved from the device tree
 * size / order must be aligned with the binding
 * modification NOT ALLOWED !!!
 ***********************************************************/
#define DDRCTL_REG_REG_SIZE	25	/* st,ctl-reg */
#define DDRCTL_REG_TIMING_SIZE	12	/* st,ctl-timing */
#define DDRCTL_REG_MAP_SIZE	9	/* st,ctl-map */
#define DDRCTL_REG_PERF_SIZE	17	/* st,ctl-perf */

#define DDRPHY_REG_REG_SIZE	11	/* st,phy-reg */
#define DDRPHY_REG_TIMING_SIZE	10	/* st,phy-timing */
#define DDRPHY_REG_CAL_SIZE	12	/* st,phy-cal */

#define DDRCTL_REG_REG(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_reg)
static const struct reg_desc ddr_reg[DDRCTL_REG_REG_SIZE] = {
	DDRCTL_REG_REG(mstr),
	DDRCTL_REG_REG(mrctrl0),
	DDRCTL_REG_REG(mrctrl1),
	DDRCTL_REG_REG(derateen),
	DDRCTL_REG_REG(derateint),
	DDRCTL_REG_REG(pwrctl),
	DDRCTL_REG_REG(pwrtmg),
	DDRCTL_REG_REG(hwlpctl),
	DDRCTL_REG_REG(rfshctl0),
	DDRCTL_REG_REG(rfshctl3),
	DDRCTL_REG_REG(crcparctl0),
	DDRCTL_REG_REG(zqctl0),
	DDRCTL_REG_REG(dfitmg0),
	DDRCTL_REG_REG(dfitmg1),
	DDRCTL_REG_REG(dfilpcfg0),
	DDRCTL_REG_REG(dfiupd0),
	DDRCTL_REG_REG(dfiupd1),
	DDRCTL_REG_REG(dfiupd2),
	DDRCTL_REG_REG(dfiphymstr),
	DDRCTL_REG_REG(odtmap),
	DDRCTL_REG_REG(dbg0),
	DDRCTL_REG_REG(dbg1),
	DDRCTL_REG_REG(dbgcmd),
	DDRCTL_REG_REG(poisoncfg),
	DDRCTL_REG_REG(pccfg),
};

#define DDRCTL_REG_TIMING(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_timing)
static const struct reg_desc ddr_timing[DDRCTL_REG_TIMING_SIZE] = {
	DDRCTL_REG_TIMING(rfshtmg),
	DDRCTL_REG_TIMING(dramtmg0),
	DDRCTL_REG_TIMING(dramtmg1),
	DDRCTL_REG_TIMING(dramtmg2),
	DDRCTL_REG_TIMING(dramtmg3),
	DDRCTL_REG_TIMING(dramtmg4),
	DDRCTL_REG_TIMING(dramtmg5),
	DDRCTL_REG_TIMING(dramtmg6),
	DDRCTL_REG_TIMING(dramtmg7),
	DDRCTL_REG_TIMING(dramtmg8),
	DDRCTL_REG_TIMING(dramtmg14),
	DDRCTL_REG_TIMING(odtcfg),
};

#define DDRCTL_REG_MAP(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_map)
static const struct reg_desc ddr_map[DDRCTL_REG_MAP_SIZE] = {
	DDRCTL_REG_MAP(addrmap1),
	DDRCTL_REG_MAP(addrmap2),
	DDRCTL_REG_MAP(addrmap3),
	DDRCTL_REG_MAP(addrmap4),
	DDRCTL_REG_MAP(addrmap5),
	DDRCTL_REG_MAP(addrmap6),
	DDRCTL_REG_MAP(addrmap9),
	DDRCTL_REG_MAP(addrmap10),
	DDRCTL_REG_MAP(addrmap11),
};

#define DDRCTL_REG_PERF(x)	DDRCTL_REG(x, stm32mp1_ddrctrl_perf)
static const struct reg_desc ddr_perf[DDRCTL_REG_PERF_SIZE] = {
	DDRCTL_REG_PERF(sched),
	DDRCTL_REG_PERF(sched1),
	DDRCTL_REG_PERF(perfhpr1),
	DDRCTL_REG_PERF(perflpr1),
	DDRCTL_REG_PERF(perfwr1),
	DDRCTL_REG_PERF(pcfgr_0),
	DDRCTL_REG_PERF(pcfgw_0),
	DDRCTL_REG_PERF(pcfgqos0_0),
	DDRCTL_REG_PERF(pcfgqos1_0),
	DDRCTL_REG_PERF(pcfgwqos0_0),
	DDRCTL_REG_PERF(pcfgwqos1_0),
	DDRCTL_REG_PERF(pcfgr_1),
	DDRCTL_REG_PERF(pcfgw_1),
	DDRCTL_REG_PERF(pcfgqos0_1),
	DDRCTL_REG_PERF(pcfgqos1_1),
	DDRCTL_REG_PERF(pcfgwqos0_1),
	DDRCTL_REG_PERF(pcfgwqos1_1),
};

#define DDRPHY_REG_REG(x)	DDRPHY_REG(x, stm32mp1_ddrphy_reg)
static const struct reg_desc ddrphy_reg[DDRPHY_REG_REG_SIZE] = {
	DDRPHY_REG_REG(pgcr),
	DDRPHY_REG_REG(aciocr),
	DDRPHY_REG_REG(dxccr),
	DDRPHY_REG_REG(dsgcr),
	DDRPHY_REG_REG(dcr),
	DDRPHY_REG_REG(odtcr),
	DDRPHY_REG_REG(zq0cr1),
	DDRPHY_REG_REG(dx0gcr),
	DDRPHY_REG_REG(dx1gcr),
	DDRPHY_REG_REG(dx2gcr),
	DDRPHY_REG_REG(dx3gcr),
};

#define DDRPHY_REG_TIMING(x)	DDRPHY_REG(x, stm32mp1_ddrphy_timing)
static const struct reg_desc ddrphy_timing[DDRPHY_REG_TIMING_SIZE] = {
	DDRPHY_REG_TIMING(ptr0),
	DDRPHY_REG_TIMING(ptr1),
	DDRPHY_REG_TIMING(ptr2),
	DDRPHY_REG_TIMING(dtpr0),
	DDRPHY_REG_TIMING(dtpr1),
	DDRPHY_REG_TIMING(dtpr2),
	DDRPHY_REG_TIMING(mr0),
	DDRPHY_REG_TIMING(mr1),
	DDRPHY_REG_TIMING(mr2),
	DDRPHY_REG_TIMING(mr3),
};

#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)
static const struct reg_desc ddrphy_cal[DDRPHY_REG_CAL_SIZE] = {
	DDRPHY_REG_CAL(dx0dllcr),
	DDRPHY_REG_CAL(dx0dqtr),
	DDRPHY_REG_CAL(dx0dqstr),
	DDRPHY_REG_CAL(dx1dllcr),
	DDRPHY_REG_CAL(dx1dqtr),
	DDRPHY_REG_CAL(dx1dqstr),
	DDRPHY_REG_CAL(dx2dllcr),
	DDRPHY_REG_CAL(dx2dqtr),
	DDRPHY_REG_CAL(dx2dqstr),
	DDRPHY_REG_CAL(dx3dllcr),
	DDRPHY_REG_CAL(dx3dqtr),
	DDRPHY_REG_CAL(dx3dqstr),
};

/*****************************************************************
 * REGISTERS ARRAY: used to parse device tree and interactive mode
 *****************************************************************/
enum reg_type {
	REG_REG,
	REG_TIMING,
	REG_PERF,
	REG_MAP,
	REGPHY_REG,
	REGPHY_TIMING,
	REGPHY_CAL,
	REG_TYPE_NB
};

enum base_type {
	DDR_BASE,
	DDRPHY_BASE,
	NONE_BASE
};

struct ddr_reg_info {
	const char *name;
	const struct reg_desc *desc;
	u8 size;
	enum base_type base;
};

#define DDRPHY_REG_CAL(x)	DDRPHY_REG(x, stm32mp1_ddrphy_cal)

const struct ddr_reg_info ddr_registers[REG_TYPE_NB] = {
[REG_REG] = {
	"static", ddr_reg, DDRCTL_REG_REG_SIZE, DDR_BASE},
[REG_TIMING] = {
	"timing", ddr_timing, DDRCTL_REG_TIMING_SIZE, DDR_BASE},
[REG_PERF] = {
	"perf", ddr_perf, DDRCTL_REG_PERF_SIZE, DDR_BASE},
[REG_MAP] = {
	"map", ddr_map, DDRCTL_REG_MAP_SIZE, DDR_BASE},
[REGPHY_REG] = {
	"static", ddrphy_reg, DDRPHY_REG_REG_SIZE, DDRPHY_BASE},
[REGPHY_TIMING] = {
	"timing", ddrphy_timing, DDRPHY_REG_TIMING_SIZE, DDRPHY_BASE},
[REGPHY_CAL] = {
	"cal", ddrphy_cal, DDRPHY_REG_CAL_SIZE, DDRPHY_BASE},
};

const char *base_name[] = {
	[DDR_BASE] = "ctl",
	[DDRPHY_BASE] = "phy",
};

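/* return the register base address matching @base (PHY or controller) */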
static u32 get_base_addr(const struct ddr_info *priv, enum base_type base)
{
	if (base == DDRPHY_BASE)
		return (u32)priv->phy;
	else
		return (u32)priv->ctl;
}

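/*
 * Program one family of registers: for each reg_desc entry of the given
 * type, read the 32-bit value stored at par_offset in the parameter
 * structure @param (filled from the device tree) and write it to the
 * register located at offset from the controller or PHY base address.
 */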
static void set_reg(const struct ddr_info *priv,
		    enum reg_type type,
		    const void *param)
{
	unsigned int i;
	unsigned int *ptr, value;
	enum base_type base = ddr_registers[type].base;
	u32 base_addr = get_base_addr(priv, base);
	const struct reg_desc *desc = ddr_registers[type].desc;

	debug("init %s\n", ddr_registers[type].name);
	for (i = 0; i < ddr_registers[type].size; i++) {
		ptr = (unsigned int *)(base_addr + desc[i].offset);
		if (desc[i].par_offset == INVALID_OFFSET) {
			pr_err("invalid parameter offset for %s", desc[i].name);
		} else {
			value = *((u32 *)((u32)param +
					  desc[i].par_offset));
			writel(value, ptr);
			debug("[0x%x] %s= 0x%08x\n",
			      (u32)ptr, desc[i].name, value);
		}
	}
}

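/*
 * Poll the PHY PGSR register until either the init done flag (IDONE) or
 * one of the training/error flags is raised, with a 1 second timeout.
 */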
static void ddrphy_idone_wait(struct stm32mp1_ddrphy *phy)
{
	u32 pgsr;
	int ret;

	ret = readl_poll_timeout(&phy->pgsr, pgsr,
				 pgsr & (DDRPHYC_PGSR_IDONE |
					 DDRPHYC_PGSR_DTERR |
					 DDRPHYC_PGSR_DTIERR |
					 DDRPHYC_PGSR_DFTERR |
					 DDRPHYC_PGSR_RVERR |
					 DDRPHYC_PGSR_RVEIRR),
				 1000000);
	debug("\n[0x%08x] pgsr = 0x%08x ret=%d\n",
	      (u32)&phy->pgsr, pgsr, ret);
}

void stm32mp1_ddrphy_init(struct stm32mp1_ddrphy *phy, u32 pir)
{
	pir |= DDRPHYC_PIR_INIT;
	writel(pir, &phy->pir);
	debug("[0x%08x] pir = 0x%08x -> 0x%08x\n",
	      (u32)&phy->pir, pir, readl(&phy->pir));

	/* wait 10 configuration clock cycles before starting to poll */
	udelay(10);

	/* wait for DRAM initialization and Gate Training Evaluation to complete */
	ddrphy_idone_wait(phy);
}

/* start quasi dynamic register update */
static void start_sw_done(struct stm32mp1_ddrctl *ctl)
{
	clrbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);
}

/* wait for completion of a quasi dynamic register update */
static void wait_sw_done_ack(struct stm32mp1_ddrctl *ctl)
{
	int ret;
	u32 swstat;

	setbits_le32(&ctl->swctl, DDRCTRL_SWCTL_SW_DONE);

	ret = readl_poll_timeout(&ctl->swstat, swstat,
				 swstat & DDRCTRL_SWSTAT_SW_DONE_ACK,
				 1000000);
	if (ret)
		panic("Timeout initialising DRAM : DDR->swstat = %x\n",
		      swstat);

	debug("[0x%08x] swstat = 0x%08x\n", (u32)&ctl->swstat, swstat);
}

/* wait for the DDR controller to reach the expected operating mode */
static void wait_operating_mode(struct ddr_info *priv, int mode)
{
	u32 stat, val, mask, val2 = 0, mask2 = 0;
	int ret;

	mask = DDRCTRL_STAT_OPERATING_MODE_MASK;
	val = mode;
	/* self-refresh due to software => check also STAT.selfref_type */
	if (mode == DDRCTRL_STAT_OPERATING_MODE_SR) {
		mask |= DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val |= DDRCTRL_STAT_SELFREF_TYPE_SR;
	} else if (mode == DDRCTRL_STAT_OPERATING_MODE_NORMAL) {
		/* normal mode: also accept automatic self refresh */
		mask2 = DDRCTRL_STAT_OPERATING_MODE_MASK |
			DDRCTRL_STAT_SELFREF_TYPE_MASK;
		val2 = DDRCTRL_STAT_OPERATING_MODE_SR |
		       DDRCTRL_STAT_SELFREF_TYPE_ASR;
	}

	ret = readl_poll_timeout(&priv->ctl->stat, stat,
				 ((stat & mask) == val) ||
				 (mask2 && ((stat & mask2) == val2)),
				 1000000);

	if (ret)
		panic("Timeout DRAM : DDR->stat = %x\n", stat);

	debug("[0x%08x] stat = 0x%08x\n", (u32)&priv->ctl->stat, stat);
}

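/*
 * Disable auto refresh and power-down and clear dfi_init_complete_en via
 * a quasi dynamic register update; used before DQS training (step 8 of
 * stm32mp1_ddr_init()) and undone by stm32mp1_refresh_restore().
 */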
void stm32mp1_refresh_disable(struct stm32mp1_ddrctl *ctl)
{
	start_sw_done(ctl);
	/* quasi-dynamic register update */
	setbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	clrbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	clrbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}

void stm32mp1_refresh_restore(struct stm32mp1_ddrctl *ctl,
			      u32 rfshctl3, u32 pwrctl)
{
	start_sw_done(ctl);
	if (!(rfshctl3 & DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH))
		clrbits_le32(&ctl->rfshctl3, DDRCTRL_RFSHCTL3_DIS_AUTO_REFRESH);
	if (pwrctl & DDRCTRL_PWRCTL_POWERDOWN_EN)
		setbits_le32(&ctl->pwrctl, DDRCTRL_PWRCTL_POWERDOWN_EN);
	setbits_le32(&ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(ctl);
}

/* board-specific DDR power initializations */
__weak int board_ddr_power_init(void)
{
	return 0;
}

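/*
 * DDR initialization sequence, summarizing the numbered steps below:
 * reset and clock the DDR subsystem, program the DWC_ddr_umctl2
 * controller, configure and start the DDRPHY (PUBL), complete the DFI
 * handshake, run DQS gate training, restore the refresh/power-down
 * settings and finally enable the AXI ports.
 */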
__maybe_unused
void stm32mp1_ddr_init(struct ddr_info *priv,
		       const struct stm32mp1_ddr_config *config)
{
	u32 pir;
	int ret;

	ret = board_ddr_power_init();

	if (ret)
		panic("ddr power init failed\n");

	debug("name = %s\n", config->info.name);
	debug("speed = %d kHz\n", config->info.speed);
	debug("size = 0x%x\n", config->info.size);
/*
 * 1. Program the DWC_ddr_umctl2 registers
 * 1.1 RESETS: presetn, core_ddrc_rstn, aresetn
 */
	/* Assert all DDR part resets */
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	setbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);

/* 1.2. start CLOCK */
	if (stm32mp1_ddr_clk_enable(priv, config->info.speed))
		panic("invalid DRAM clock : %d kHz\n",
		      config->info.speed);

/* 1.3. deassert reset */
	/* de-assert PHY rstn and ctl_rstn via DPHYRST and DPHYCTLRST */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYCTLRST);
	/* De-assert presetn once the clocks are active
	 * and stable via DDRCAPBRST bit
	 */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAPBRST);

/* 1.4. wait 128 cycles to permit initialization of end logic */
	udelay(2);
	/* for PCLK = 133MHz => 1 us is enough, 2 to allow lower frequency */

/* 1.5. initialize registers ddr_umctl2 */
	/* Stop uMCTL2 before PHY is ready */
	clrbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	debug("[0x%08x] dfimisc = 0x%08x\n",
	      (u32)&priv->ctl->dfimisc, readl(&priv->ctl->dfimisc));

	set_reg(priv, REG_REG, &config->c_reg);
	set_reg(priv, REG_TIMING, &config->c_timing);
	set_reg(priv, REG_MAP, &config->c_map);

	/* skip CTRL init, SDRAM init is done by PHY PUBL */
	clrsetbits_le32(&priv->ctl->init0,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_MASK,
			DDRCTRL_INIT0_SKIP_DRAM_INIT_NORMAL);

	set_reg(priv, REG_PERF, &config->c_perf);

/* 2. deassert reset signal core_ddrc_rstn, aresetn and presetn */
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCORERST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DDRCAXIRST);
	clrbits_le32(priv->rcc + RCC_DDRITFCR, RCC_DDRITFCR_DPHYAPBRST);

/* 3. start PHY init by accessing relevant PUBL registers
 * (DXGCR, DCR, PTR*, MR*, DTPR*)
 */
	set_reg(priv, REGPHY_REG, &config->p_reg);
	set_reg(priv, REGPHY_TIMING, &config->p_timing);
	set_reg(priv, REGPHY_CAL, &config->p_cal);

/* 4. Monitor PHY init status by polling PUBL register PGSR.IDONE
 * Perform DDR PHY DRAM initialization and Gate Training Evaluation
 */
	ddrphy_idone_wait(priv->phy);

/* 5. Indicate to PUBL that controller performs SDRAM initialization
 * by setting PIR.INIT and PIR CTLDINIT and poll PGSR.IDONE
 * DRAM init is done by PHY, init0.skip_dram.init = 1
 */
	pir = DDRPHYC_PIR_DLLSRST | DDRPHYC_PIR_DLLLOCK | DDRPHYC_PIR_ZCAL |
	      DDRPHYC_PIR_ITMSRST | DDRPHYC_PIR_DRAMINIT | DDRPHYC_PIR_ICPC;

	if (config->c_reg.mstr & DDRCTRL_MSTR_DDR3)
		pir |= DDRPHYC_PIR_DRAMRST; /* only for DDR3 */

	stm32mp1_ddrphy_init(priv->phy, pir);

/* 6. SET DFIMISC.dfi_init_complete_en to 1 */
	/* Enable quasi-dynamic register programming */
	start_sw_done(priv->ctl);
	setbits_le32(&priv->ctl->dfimisc, DDRCTRL_DFIMISC_DFI_INIT_COMPLETE_EN);
	wait_sw_done_ack(priv->ctl);

/* 7. Wait for DWC_ddr_umctl2 to move to normal operation mode
 * by monitoring STAT.operating_mode signal
 */
	/* wait uMCTL2 ready */

	wait_operating_mode(priv, DDRCTRL_STAT_OPERATING_MODE_NORMAL);

	debug("DDR DQS training : ");
/* 8. Disable Auto refresh and power down by setting
 * - RFSHCTL3.dis_au_refresh = 1
 * - PWRCTL.powerdown_en = 0
 * - DFIMISC.dfiinit_complete_en = 0
 */
	stm32mp1_refresh_disable(priv->ctl);

/* 9. Program PUBL PGCR to enable refresh during training and rank to train
 * not done => keep the programmed value in PGCR
 */

/* 10. configure PUBL PIR register to specify which training step to run */
	/* warning : RVTRN is not supported by this PUBL */
	stm32mp1_ddrphy_init(priv->phy, DDRPHYC_PIR_QSTRN);

/* 11. monitor PUB PGSR.IDONE to poll completion of training sequence */
	ddrphy_idone_wait(priv->phy);

/* 12. set back registers in step 8 to the original values if desired */
	stm32mp1_refresh_restore(priv->ctl, config->c_reg.rfshctl3,
				 config->c_reg.pwrctl);

	/* enable uMCTL2 AXI port 0 and 1 */
	setbits_le32(&priv->ctl->pctrl_0, DDRCTRL_PCTRL_N_PORT_EN);
	setbits_le32(&priv->ctl->pctrl_1, DDRCTRL_PCTRL_N_PORT_EN);
}