// SPDX-License-Identifier: GPL-2.0+ OR BSD-3-Clause
/*
 * (C) Copyright 2017 Rockchip Electronics Co., Ltd
 */
#include <common.h>
#include <clk.h>
#include <dm.h>
#include <dt-structs.h>
#include <errno.h>
#include <ram.h>
#include <regmap.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/cru_rk322x.h>
#include <asm/arch-rockchip/grf_rk322x.h>
#include <asm/arch-rockchip/hardware.h>
#include <asm/arch-rockchip/sdram_rk322x.h>
#include <asm/arch-rockchip/timer.h>
#include <asm/arch-rockchip/uart.h>
#include <asm/arch-rockchip/sdram_common.h>
#include <asm/types.h>
#include <linux/err.h>

DECLARE_GLOBAL_DATA_PTR;

struct chan_info {
	struct rk322x_ddr_pctl *pctl;
	struct rk322x_ddr_phy *phy;
	struct rk322x_service_sys *msch;
};

struct dram_info {
	struct chan_info chan[1];
	struct ram_info info;
	struct clk ddr_clk;
	struct rk322x_cru *cru;
	struct rk322x_grf *grf;
};

struct rk322x_sdram_params {
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	struct dtd_rockchip_rk3228_dmc of_plat;
#endif
	struct rk322x_sdram_channel ch[1];
	struct rk322x_pctl_timing pctl_timing;
	struct rk322x_phy_timing phy_timing;
	struct rk322x_base_params base;
	int num_channels;
	struct regmap *map;
};

#ifdef CONFIG_TPL_BUILD
/*
 * [7:6] bank(n:n bit bank)
 * [5:4] row(13+n)
 * [3] cs(0:1 cs, 1:2 cs)
 * [2:1] bank(n:n bit bank)
 * [0] col(10+n)
 */
const char ddr_cfg_2_rbc[] = {
	((0 << 6) | (0 << 4) | (0 << 3) | (1 << 2) | 1),
	((0 << 6) | (1 << 4) | (0 << 3) | (1 << 2) | 1),
	((0 << 6) | (2 << 4) | (0 << 3) | (1 << 2) | 1),
	((0 << 6) | (3 << 4) | (0 << 3) | (1 << 2) | 1),
	((0 << 6) | (1 << 4) | (0 << 3) | (1 << 2) | 2),
	((0 << 6) | (2 << 4) | (0 << 3) | (1 << 2) | 2),
	((0 << 6) | (3 << 4) | (0 << 3) | (1 << 2) | 2),
	((0 << 6) | (0 << 4) | (0 << 3) | (1 << 2) | 0),
	((0 << 6) | (1 << 4) | (0 << 3) | (1 << 2) | 0),
	((0 << 6) | (2 << 4) | (0 << 3) | (1 << 2) | 0),
	((0 << 6) | (3 << 4) | (0 << 3) | (1 << 2) | 0),
	((0 << 6) | (2 << 4) | (0 << 3) | (0 << 2) | 1),
	((1 << 6) | (1 << 4) | (0 << 3) | (0 << 2) | 2),
	((1 << 6) | (1 << 4) | (0 << 3) | (0 << 2) | 1),
	((0 << 6) | (3 << 4) | (1 << 3) | (1 << 2) | 1),
	((0 << 6) | (3 << 4) | (1 << 3) | (1 << 2) | 0),
};

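/* Copy n bytes of u32 values from src into the memory-mapped registers at dest */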
static void copy_to_reg(u32 *dest, const u32 *src, u32 n)
{
	int i;

	for (i = 0; i < n / sizeof(u32); i++) {
		writel(*src, dest);
		src++;
		dest++;
	}
}

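/*
 * Cycle the CRU soft resets for the DDR controller and DDR PHY, then
 * de-assert the PHY's own analog and digital soft resets.
 */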
void phy_pctrl_reset(struct rk322x_cru *cru,
		     struct rk322x_ddr_phy *ddr_phy)
{
	rk_clrsetreg(&cru->cru_softrst_con[5], 1 << DDRCTRL_PSRST_SHIFT |
		     1 << DDRCTRL_SRST_SHIFT | 1 << DDRPHY_PSRST_SHIFT |
		     1 << DDRPHY_SRST_SHIFT,
		     1 << DDRCTRL_PSRST_SHIFT | 1 << DDRCTRL_SRST_SHIFT |
		     1 << DDRPHY_PSRST_SHIFT | 1 << DDRPHY_SRST_SHIFT);

	rockchip_udelay(10);

	rk_clrreg(&cru->cru_softrst_con[5], 1 << DDRPHY_PSRST_SHIFT |
		  1 << DDRPHY_SRST_SHIFT);
	rockchip_udelay(10);

	rk_clrreg(&cru->cru_softrst_con[5], 1 << DDRCTRL_PSRST_SHIFT |
		  1 << DDRCTRL_SRST_SHIFT);
	rockchip_udelay(10);

	clrbits_le32(&ddr_phy->ddrphy_reg[0],
		     SOFT_RESET_MASK << SOFT_RESET_SHIFT);
	rockchip_udelay(10);
	setbits_le32(&ddr_phy->ddrphy_reg[0],
		     SOFT_DERESET_ANALOG);
	rockchip_udelay(5);
	setbits_le32(&ddr_phy->ddrphy_reg[0],
		     SOFT_DERESET_DIGITAL);

	rockchip_udelay(1);
}

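/*
 * Configure the PHY DLL bypass settings for the given DDR clock rate;
 * freq is in MHz and selects frequency-dependent register values.
 */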
void phy_dll_bypass_set(struct rk322x_ddr_phy *ddr_phy, u32 freq)
{
	u32 tmp;

	setbits_le32(&ddr_phy->ddrphy_reg[0x13], 0x10);
	setbits_le32(&ddr_phy->ddrphy_reg[0x26], 0x10);
	setbits_le32(&ddr_phy->ddrphy_reg[0x36], 0x10);
	setbits_le32(&ddr_phy->ddrphy_reg[0x46], 0x10);
	setbits_le32(&ddr_phy->ddrphy_reg[0x56], 0x10);

	clrbits_le32(&ddr_phy->ddrphy_reg[0x14], 0x8);
	clrbits_le32(&ddr_phy->ddrphy_reg[0x27], 0x8);
	clrbits_le32(&ddr_phy->ddrphy_reg[0x37], 0x8);
	clrbits_le32(&ddr_phy->ddrphy_reg[0x47], 0x8);
	clrbits_le32(&ddr_phy->ddrphy_reg[0x57], 0x8);

	if (freq <= 400)
		setbits_le32(&ddr_phy->ddrphy_reg[0xa4], 0x1f);
	else
		clrbits_le32(&ddr_phy->ddrphy_reg[0xa4], 0x1f);

	if (freq <= 680)
		tmp = 3;
	else
		tmp = 2;

	writel(tmp, &ddr_phy->ddrphy_reg[0x28]);
	writel(tmp, &ddr_phy->ddrphy_reg[0x38]);
	writel(tmp, &ddr_phy->ddrphy_reg[0x48]);
	writel(tmp, &ddr_phy->ddrphy_reg[0x58]);
}

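/* Issue a command via the MCMD register and busy-wait until the controller clears START_CMD */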
static void send_command(struct rk322x_ddr_pctl *pctl,
			 u32 rank, u32 cmd, u32 arg)
{
	writel((START_CMD | (rank << 20) | arg | cmd), &pctl->mcmd);
	rockchip_udelay(1);
	while (readl(&pctl->mcmd) & START_CMD)
		;
}

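/*
 * Run the DRAM init sequence on all ranks: for DDR3, precharge-all,
 * MR2/MR3/MR1/MR0 (with DLL reset) and ZQ calibration; for LPDDR2/3,
 * the MRW init sequence followed by MR1/MR2/MR3 (plus MR11 on LPDDR3).
 */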
static void memory_init(struct chan_info *chan,
			struct rk322x_sdram_params *sdram_params)
{
	struct rk322x_ddr_pctl *pctl = chan->pctl;
	u32 dramtype = sdram_params->base.dramtype;

	if (dramtype == DDR3) {
		send_command(pctl, 3, DESELECT_CMD, 0);
		rockchip_udelay(1);
		send_command(pctl, 3, PREA_CMD, 0);
		send_command(pctl, 3, MRS_CMD,
			     (0x02 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (sdram_params->phy_timing.mr[2] & CMD_ADDR_MASK) <<
			     CMD_ADDR_SHIFT);

		send_command(pctl, 3, MRS_CMD,
			     (0x03 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (sdram_params->phy_timing.mr[3] & CMD_ADDR_MASK) <<
			     CMD_ADDR_SHIFT);

		send_command(pctl, 3, MRS_CMD,
			     (0x01 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     (sdram_params->phy_timing.mr[1] & CMD_ADDR_MASK) <<
			     CMD_ADDR_SHIFT);

		send_command(pctl, 3, MRS_CMD,
			     (0x00 & BANK_ADDR_MASK) << BANK_ADDR_SHIFT |
			     ((sdram_params->phy_timing.mr[0] |
			       DDR3_DLL_RESET) &
			      CMD_ADDR_MASK) << CMD_ADDR_SHIFT);

		send_command(pctl, 3, ZQCL_CMD, 0);
	} else {
		send_command(pctl, 3, MRS_CMD,
			     (0x63 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0 & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		rockchip_udelay(10);
		send_command(pctl, 3, MRS_CMD,
			     (0x10 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0xff & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		rockchip_udelay(1);
		send_command(pctl, 3, MRS_CMD,
			     (0x10 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (0xff & LPDDR23_OP_MASK) <<
			     LPDDR23_OP_SHIFT);
		rockchip_udelay(1);
		send_command(pctl, 3, MRS_CMD,
			     (1 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (sdram_params->phy_timing.mr[1] &
			      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		send_command(pctl, 3, MRS_CMD,
			     (2 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (sdram_params->phy_timing.mr[2] &
			      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		send_command(pctl, 3, MRS_CMD,
			     (3 & LPDDR23_MA_MASK) << LPDDR23_MA_SHIFT |
			     (sdram_params->phy_timing.mr[3] &
			      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
		if (dramtype == LPDDR3)
			send_command(pctl, 3, MRS_CMD, (11 & LPDDR23_MA_MASK) <<
				     LPDDR23_MA_SHIFT |
				     (sdram_params->phy_timing.mr11 &
				      LPDDR23_OP_MASK) << LPDDR23_OP_SHIFT);
	}
}

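/*
 * Run the PHY DQS calibration (data training) on CS0 with auto-refresh
 * held off, then compare the per-byte result against the configured bus
 * width; returns 0 on success, -1 on failure.
 */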
static u32 data_training(struct chan_info *chan)
{
	struct rk322x_ddr_phy *ddr_phy = chan->phy;
	struct rk322x_ddr_pctl *pctl = chan->pctl;
	u32 value;
	u32 bw = (readl(&ddr_phy->ddrphy_reg[0]) >> 4) & 0xf;
	u32 ret;

	/* disable auto refresh */
	value = readl(&pctl->trefi) | (1 << 31);
	writel(1 << 31, &pctl->trefi);

	clrsetbits_le32(&ddr_phy->ddrphy_reg[2], 0x30,
			DQS_SQU_CAL_SEL_CS0);
	setbits_le32(&ddr_phy->ddrphy_reg[2], DQS_SQU_CAL_START);

	rockchip_udelay(30);
	ret = readl(&ddr_phy->ddrphy_reg[0xff]);

	clrbits_le32(&ddr_phy->ddrphy_reg[2],
		     DQS_SQU_CAL_START);

	/*
	 * Data training takes about 20us, so issue a precharge-all and an
	 * auto-refresh command to make up for the refresh interval
	 * (about 7.8us) that was missed while it ran.
	 */
	send_command(pctl, 3, PREA_CMD, 0);
	send_command(pctl, 3, REF_CMD, 0);

	writel(value, &pctl->trefi);

	if (ret & 0x10) {
		ret = -1;
	} else {
		ret = (ret & 0xf) ^ bw;
		ret = (ret == 0) ? 0 : -1;
	}
	return ret;
}

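/*
 * The following helpers walk the protocol controller's operating-state
 * machine (INIT_MEM / CONFIG / ACCESS / LOW_POWER) by writing state-change
 * requests to SCTL and polling STAT until the target state is reached.
 */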
static void move_to_config_state(struct rk322x_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MASK;
		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK)
				!= ACCESS)
				;
			/*
			 * If in the low-power state, wake up first and then
			 * enter the config state, so fall through.
			 */
		case ACCESS:
			/* fallthrough */
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != CONFIG)
				;
			break;
		case CONFIG:
			return;
		default:
			break;
		}
	}
}

static void move_to_access_state(struct rk322x_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MASK;
		switch (state) {
		case LOW_POWER:
			writel(WAKEUP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != ACCESS)
				;
			break;
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != CONFIG)
				;
			/* fallthrough */
		case CONFIG:
			writel(GO_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != ACCESS)
				;
			break;
		case ACCESS:
			return;
		default:
			break;
		}
	}
}

static void move_to_lowpower_state(struct rk322x_ddr_pctl *pctl)
{
	unsigned int state;

	while (1) {
		state = readl(&pctl->stat) & PCTL_STAT_MASK;
		switch (state) {
		case INIT_MEM:
			writel(CFG_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != CONFIG)
				;
			/* fallthrough */
		case CONFIG:
			writel(GO_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) != ACCESS)
				;
			break;
		case ACCESS:
			writel(SLEEP_STATE, &pctl->sctl);
			while ((readl(&pctl->stat) & PCTL_STAT_MASK) !=
			       LOW_POWER)
				;
			break;
		case LOW_POWER:
			return;
		default:
			break;
		}
	}
}

/* The pctl must be in the low-power state when this function is called */
static void phy_softreset(struct dram_info *dram)
{
	struct rk322x_ddr_phy *ddr_phy = dram->chan[0].phy;
	struct rk322x_grf *grf = dram->grf;

	writel(GRF_DDRPHY_BUFFEREN_CORE_EN, &grf->soc_con[0]);
	clrbits_le32(&ddr_phy->ddrphy_reg[0], 0x3 << 2);
	rockchip_udelay(1);
	setbits_le32(&ddr_phy->ddrphy_reg[0], 1 << 2);
	rockchip_udelay(5);
	setbits_le32(&ddr_phy->ddrphy_reg[0], 1 << 3);
	writel(GRF_DDRPHY_BUFFEREN_CORE_DIS, &grf->soc_con[0]);
}

/* bw: 2: 32-bit, 1: 16-bit */
static void set_bw(struct dram_info *dram, u32 bw)
{
	struct rk322x_ddr_pctl *pctl = dram->chan[0].pctl;
	struct rk322x_ddr_phy *ddr_phy = dram->chan[0].phy;
	struct rk322x_grf *grf = dram->grf;

	if (bw == 1) {
		setbits_le32(&pctl->ppcfg, 1);
		clrbits_le32(&ddr_phy->ddrphy_reg[0], 0xc << 4);
		writel(GRF_MSCH_NOC_16BIT_EN, &grf->soc_con[0]);
		clrbits_le32(&ddr_phy->ddrphy_reg[0x46], 0x8);
		clrbits_le32(&ddr_phy->ddrphy_reg[0x56], 0x8);
	} else {
		clrbits_le32(&pctl->ppcfg, 1);
		setbits_le32(&ddr_phy->ddrphy_reg[0], 0xf << 4);
		writel(GRF_DDR_32BIT_EN | GRF_MSCH_NOC_32BIT_EN,
		       &grf->soc_con[0]);
		setbits_le32(&ddr_phy->ddrphy_reg[0x46], 0x8);
		setbits_le32(&ddr_phy->ddrphy_reg[0x56], 0x8);
	}
}

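/*
 * Program the protocol controller: DFI interface settings, the timing
 * register block from pctl_timing, and the MCFG/ODT setup matching the
 * DRAM type (DDR3, LPDDR2 or LPDDR3).
 */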
static void pctl_cfg(struct rk322x_ddr_pctl *pctl,
		     struct rk322x_sdram_params *sdram_params,
		     struct rk322x_grf *grf)
{
	u32 burst_len;
	u32 bw;
	u32 dramtype = sdram_params->base.dramtype;

	if (sdram_params->ch[0].bw == 2)
		bw = GRF_DDR_32BIT_EN | GRF_MSCH_NOC_32BIT_EN;
	else
		bw = GRF_MSCH_NOC_16BIT_EN;

	writel(DFI_INIT_START | DFI_DATA_BYTE_DISABLE_EN, &pctl->dfistcfg0);
	writel(DFI_DRAM_CLK_SR_EN | DFI_DRAM_CLK_DPD_EN, &pctl->dfistcfg1);
	writel(DFI_PARITY_INTR_EN | DFI_PARITY_EN, &pctl->dfistcfg2);
	writel(0x51010, &pctl->dfilpcfg0);

	writel(1, &pctl->dfitphyupdtype0);
	writel(0x0d, &pctl->dfitphyrdlat);
	writel(0, &pctl->dfitphywrdata);

	writel(0, &pctl->dfiupdcfg);
	copy_to_reg(&pctl->togcnt1u, &sdram_params->pctl_timing.togcnt1u,
		    sizeof(struct rk322x_pctl_timing));
	if (dramtype == DDR3) {
		writel((1 << 3) | (1 << 11),
		       &pctl->dfiodtcfg);
		writel(7 << 16, &pctl->dfiodtcfg1);
		writel((readl(&pctl->tcl) - 1) / 2 - 1, &pctl->dfitrddataen);
		writel((readl(&pctl->tcwl) - 1) / 2 - 1, &pctl->dfitphywrlat);
		writel(500, &pctl->trsth);
		writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT | DDR3_EN |
		       DDR2_DDR3_BL_8 | (6 - 4) << TFAW_SHIFT | PD_EXIT_SLOW |
		       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
		       &pctl->mcfg);
		writel(bw | GRF_DDR3_EN, &grf->soc_con[0]);
	} else {
		if (sdram_params->phy_timing.bl & PHT_BL_8)
			burst_len = MDDR_LPDDR2_BL_8;
		else
			burst_len = MDDR_LPDDR2_BL_4;

		writel(readl(&pctl->tcl) / 2 - 1, &pctl->dfitrddataen);
		writel(readl(&pctl->tcwl) / 2 - 1, &pctl->dfitphywrlat);
		writel(0, &pctl->trsth);
		if (dramtype == LPDDR2) {
			writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
			       LPDDR2_S4 | LPDDR2_EN | burst_len |
			       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
			       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
			       &pctl->mcfg);
			writel(0, &pctl->dfiodtcfg);
			writel(0, &pctl->dfiodtcfg1);
		} else {
			writel(0 << MDDR_LPDDR2_CLK_STOP_IDLE_SHIFT |
			       LPDDR2_S4 | LPDDR3_EN | burst_len |
			       (6 - 4) << TFAW_SHIFT | PD_EXIT_FAST |
			       1 << PD_TYPE_SHIFT | 0 << PD_IDLE_SHIFT,
			       &pctl->mcfg);
			writel((1 << 3) | (1 << 2), &pctl->dfiodtcfg);
			writel((7 << 16) | 4, &pctl->dfiodtcfg1);
		}
		writel(bw | GRF_LPDDR2_3_EN, &grf->soc_con[0]);
	}
	setbits_le32(&pctl->scfg, 1);
}

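/*
 * Program the MSCH (NOC) timing registers and the PHY: DRAM type and
 * burst length, CL/AL and CWL, plus drive strength and ODT for the
 * command, clock and DQS groups.
 */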
static void phy_cfg(struct chan_info *chan,
		    struct rk322x_sdram_params *sdram_params)
{
	struct rk322x_ddr_phy *ddr_phy = chan->phy;
	struct rk322x_service_sys *axi_bus = chan->msch;
	struct rk322x_msch_timings *noc_timing = &sdram_params->base.noc_timing;
	struct rk322x_phy_timing *phy_timing = &sdram_params->phy_timing;
	struct rk322x_pctl_timing *pctl_timing = &sdram_params->pctl_timing;
	u32 cmd_drv, clk_drv, dqs_drv, dqs_odt;

	writel(noc_timing->ddrtiming, &axi_bus->ddrtiming);
	writel(noc_timing->ddrmode, &axi_bus->ddrmode);
	writel(noc_timing->readlatency, &axi_bus->readlatency);
	writel(noc_timing->activate, &axi_bus->activate);
	writel(noc_timing->devtodev, &axi_bus->devtodev);

	switch (sdram_params->base.dramtype) {
	case DDR3:
		writel(PHY_DDR3 | phy_timing->bl, &ddr_phy->ddrphy_reg[1]);
		break;
	case LPDDR2:
		writel(PHY_LPDDR2 | phy_timing->bl, &ddr_phy->ddrphy_reg[1]);
		break;
	default:
		writel(PHY_LPDDR2 | phy_timing->bl, &ddr_phy->ddrphy_reg[1]);
		break;
	}

	writel(phy_timing->cl_al, &ddr_phy->ddrphy_reg[0xb]);
	writel(pctl_timing->tcwl, &ddr_phy->ddrphy_reg[0xc]);

	cmd_drv = PHY_RON_RTT_34OHM;
	clk_drv = PHY_RON_RTT_45OHM;
	dqs_drv = PHY_RON_RTT_34OHM;
	if (sdram_params->base.dramtype == LPDDR2)
		dqs_odt = PHY_RON_RTT_DISABLE;
	else
		dqs_odt = PHY_RON_RTT_225OHM;

	writel(cmd_drv, &ddr_phy->ddrphy_reg[0x11]);
	clrsetbits_le32(&ddr_phy->ddrphy_reg[0x12], (0x1f << 3), cmd_drv << 3);
	writel(clk_drv, &ddr_phy->ddrphy_reg[0x16]);
	writel(clk_drv, &ddr_phy->ddrphy_reg[0x18]);

	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x20]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x2f]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x30]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x3f]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x40]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x4f]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x50]);
	writel(dqs_drv, &ddr_phy->ddrphy_reg[0x5f]);

	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x21]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x2e]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x31]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x3e]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x41]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x4e]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x51]);
	writel(dqs_odt, &ddr_phy->ddrphy_reg[0x5e]);
}

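/*
 * Select the NOC address-mapping entry (ddrconf) that matches the detected
 * row/bank/column layout, using the ddr_cfg_2_rbc table above; entry 0 is
 * used as a fallback when nothing matches.
 */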
void dram_cfg_rbc(struct chan_info *chan,
		  struct rk322x_sdram_params *sdram_params)
{
	char noc_config;
	int i = 0;
	struct rk322x_sdram_channel *config = &sdram_params->ch[0];
	struct rk322x_service_sys *axi_bus = chan->msch;

	move_to_config_state(chan->pctl);

	if ((config->rank == 2) && (config->cs1_row == config->cs0_row)) {
		if ((config->col + config->bw) == 12) {
			i = 14;
			goto finish;
		} else if ((config->col + config->bw) == 11) {
			i = 15;
			goto finish;
		}
	}
	noc_config = ((config->cs0_row - 13) << 4) | ((config->bk - 2) << 2) |
		     (config->col + config->bw - 11);
	for (i = 0; i < 11; i++) {
		if (noc_config == ddr_cfg_2_rbc[i])
			break;
	}

	if (i < 11)
		goto finish;

	noc_config = ((config->bk - 2) << 6) | ((config->cs0_row - 13) << 4) |
		     (config->col + config->bw - 11);

	for (i = 11; i < 14; i++) {
		if (noc_config == ddr_cfg_2_rbc[i])
			break;
	}
	if (i < 14)
		goto finish;
	else
		i = 0;

finish:
	writel(i, &axi_bus->ddrconf);
	move_to_access_state(chan->pctl);
}

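/*
 * Encode the detected channel geometry (type, rank, col, bank, row, width)
 * into GRF os_reg[2] so that later boot stages can derive the SDRAM size.
 */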
static void dram_all_config(const struct dram_info *dram,
			    struct rk322x_sdram_params *sdram_params)
{
	struct rk322x_sdram_channel *info = &sdram_params->ch[0];
	u32 sys_reg = 0;

	sys_reg |= sdram_params->base.dramtype << SYS_REG_DDRTYPE_SHIFT;
	sys_reg |= (1 - 1) << SYS_REG_NUM_CH_SHIFT;
	sys_reg |= info->row_3_4 << SYS_REG_ROW_3_4_SHIFT(0);
	sys_reg |= 1 << SYS_REG_CHINFO_SHIFT(0);
	sys_reg |= (info->rank - 1) << SYS_REG_RANK_SHIFT(0);
	sys_reg |= (info->col - 9) << SYS_REG_COL_SHIFT(0);
	sys_reg |= info->bk == 3 ? 0 : 1 << SYS_REG_BK_SHIFT(0);
	sys_reg |= (info->cs0_row - 13) << SYS_REG_CS0_ROW_SHIFT(0);
	sys_reg |= (info->cs1_row - 13) << SYS_REG_CS1_ROW_SHIFT(0);
	sys_reg |= (2 >> info->bw) << SYS_REG_BW_SHIFT(0);
	sys_reg |= (2 >> info->dbw) << SYS_REG_DBW_SHIFT(0);

	writel(sys_reg, &dram->grf->os_reg[2]);
}

#define TEST_PATTEN 0x5aa5f00f

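/*
 * Detect the DRAM capacity: bus width via data training, then column,
 * row and rank by writing TEST_PATTEN at address-boundary offsets and
 * checking whether it aliases back to the base of SDRAM.
 */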
static int dram_cap_detect(struct dram_info *dram,
			   struct rk322x_sdram_params *sdram_params)
{
	u32 bw, row, col, addr;
	u32 ret = 0;
	struct rk322x_service_sys *axi_bus = dram->chan[0].msch;

	if (sdram_params->base.dramtype == DDR3)
		sdram_params->ch[0].dbw = 1;
	else
		sdram_params->ch[0].dbw = 2;

	move_to_config_state(dram->chan[0].pctl);
	/* bw detect */
	set_bw(dram, 2);
	if (data_training(&dram->chan[0]) == 0) {
		bw = 2;
	} else {
		bw = 1;
		set_bw(dram, 1);
		move_to_lowpower_state(dram->chan[0].pctl);
		phy_softreset(dram);
		move_to_config_state(dram->chan[0].pctl);
		if (data_training(&dram->chan[0])) {
			printf("BW detect error\n");
			ret = -EINVAL;
		}
	}
	sdram_params->ch[0].bw = bw;
	sdram_params->ch[0].bk = 3;

	if (bw == 2)
		writel(6, &axi_bus->ddrconf);
	else
		writel(3, &axi_bus->ddrconf);
	move_to_access_state(dram->chan[0].pctl);
	for (col = 11; col >= 9; col--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		addr = CONFIG_SYS_SDRAM_BASE +
			(1 << (col + bw - 1));
		writel(TEST_PATTEN, addr);
		if ((readl(addr) == TEST_PATTEN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (col == 8) {
		printf("Col detect error\n");
		ret = -EINVAL;
		goto out;
	} else {
		sdram_params->ch[0].col = col;
	}

	writel(10, &axi_bus->ddrconf);

	/* Detect row */
	for (row = 16; row >= 12; row--) {
		writel(0, CONFIG_SYS_SDRAM_BASE);
		addr = CONFIG_SYS_SDRAM_BASE + (1u << (row + 11 + 3 - 1));
		writel(TEST_PATTEN, addr);
		if ((readl(addr) == TEST_PATTEN) &&
		    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
			break;
	}
	if (row == 11) {
		printf("Row detect error\n");
		ret = -EINVAL;
	} else {
		sdram_params->ch[0].cs1_row = row;
		sdram_params->ch[0].row_3_4 = 0;
		sdram_params->ch[0].cs0_row = row;
	}
	/* cs detect */
	writel(0, CONFIG_SYS_SDRAM_BASE);
	writel(TEST_PATTEN, CONFIG_SYS_SDRAM_BASE + (1u << 30));
	writel(~TEST_PATTEN, CONFIG_SYS_SDRAM_BASE + (1u << 30) + 4);
	if ((readl(CONFIG_SYS_SDRAM_BASE + (1u << 30)) == TEST_PATTEN) &&
	    (readl(CONFIG_SYS_SDRAM_BASE) == 0))
		sdram_params->ch[0].rank = 2;
	else
		sdram_params->ch[0].rank = 1;
out:
	return ret;
}

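/*
 * Full TPL-time SDRAM bring-up: set the DDR clock, reset the controller
 * and PHY, program both, run the power-up and memory init sequences,
 * then detect capacity and commit the final address mapping.
 */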
static int sdram_init(struct dram_info *dram,
		      struct rk322x_sdram_params *sdram_params)
{
	int ret;

	ret = clk_set_rate(&dram->ddr_clk,
			   sdram_params->base.ddr_freq * MHz * 2);
	if (ret < 0) {
		printf("Could not set DDR clock\n");
		return ret;
	}

	phy_pctrl_reset(dram->cru, dram->chan[0].phy);
	phy_dll_bypass_set(dram->chan[0].phy, sdram_params->base.ddr_freq);
	pctl_cfg(dram->chan[0].pctl, sdram_params, dram->grf);
	phy_cfg(&dram->chan[0], sdram_params);
	writel(POWER_UP_START, &dram->chan[0].pctl->powctl);
	while (!(readl(&dram->chan[0].pctl->powstat) & POWER_UP_DONE))
		;
	memory_init(&dram->chan[0], sdram_params);
	move_to_access_state(dram->chan[0].pctl);
	ret = dram_cap_detect(dram, sdram_params);
	if (ret)
		goto out;
	dram_cfg_rbc(&dram->chan[0], sdram_params);
	dram_all_config(dram, sdram_params);
out:
	return ret;
}

static int rk322x_dmc_ofdata_to_platdata(struct udevice *dev)
{
#if !CONFIG_IS_ENABLED(OF_PLATDATA)
	struct rk322x_sdram_params *params = dev_get_platdata(dev);
	const void *blob = gd->fdt_blob;
	int node = dev_of_offset(dev);
	int ret;

	params->num_channels = 1;

	ret = fdtdec_get_int_array(blob, node, "rockchip,pctl-timing",
				   (u32 *)&params->pctl_timing,
				   sizeof(params->pctl_timing) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,pctl-timing\n", __func__);
		return -EINVAL;
	}
	ret = fdtdec_get_int_array(blob, node, "rockchip,phy-timing",
				   (u32 *)&params->phy_timing,
				   sizeof(params->phy_timing) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,phy-timing\n", __func__);
		return -EINVAL;
	}
	ret = fdtdec_get_int_array(blob, node, "rockchip,sdram-params",
				   (u32 *)&params->base,
				   sizeof(params->base) / sizeof(u32));
	if (ret) {
		printf("%s: Cannot read rockchip,sdram-params\n", __func__);
		return -EINVAL;
	}
	ret = regmap_init_mem(dev_ofnode(dev), &params->map);
	if (ret)
		return ret;
#endif

	return 0;
}
#endif /* CONFIG_TPL_BUILD */

#if CONFIG_IS_ENABLED(OF_PLATDATA)
static int conv_of_platdata(struct udevice *dev)
{
	struct rk322x_sdram_params *plat = dev_get_platdata(dev);
	struct dtd_rockchip_rk3228_dmc *of_plat = &plat->of_plat;
	int ret;

	memcpy(&plat->pctl_timing, of_plat->rockchip_pctl_timing,
	       sizeof(plat->pctl_timing));
	memcpy(&plat->phy_timing, of_plat->rockchip_phy_timing,
	       sizeof(plat->phy_timing));
	memcpy(&plat->base, of_plat->rockchip_sdram_params, sizeof(plat->base));

	plat->num_channels = 1;
	ret = regmap_init_mem_platdata(dev, of_plat->reg,
				       ARRAY_SIZE(of_plat->reg) / 2,
				       &plat->map);
	if (ret)
		return ret;

	return 0;
}
#endif

static int rk322x_dmc_probe(struct udevice *dev)
{
#ifdef CONFIG_TPL_BUILD
	struct rk322x_sdram_params *plat = dev_get_platdata(dev);
	int ret;
	struct udevice *dev_clk;
#endif
	struct dram_info *priv = dev_get_priv(dev);

	priv->grf = syscon_get_first_range(ROCKCHIP_SYSCON_GRF);
#ifdef CONFIG_TPL_BUILD
#if CONFIG_IS_ENABLED(OF_PLATDATA)
	ret = conv_of_platdata(dev);
	if (ret)
		return ret;
#endif

	priv->chan[0].msch = syscon_get_first_range(ROCKCHIP_SYSCON_MSCH);
	priv->chan[0].pctl = regmap_get_range(plat->map, 0);
	priv->chan[0].phy = regmap_get_range(plat->map, 1);
	ret = rockchip_get_clk(&dev_clk);
	if (ret)
		return ret;
	priv->ddr_clk.id = CLK_DDR;
	ret = clk_request(dev_clk, &priv->ddr_clk);
	if (ret)
		return ret;

	priv->cru = rockchip_get_cru();
	if (IS_ERR(priv->cru))
		return PTR_ERR(priv->cru);
	ret = sdram_init(priv, plat);
	if (ret)
		return ret;
#else
	priv->info.base = CONFIG_SYS_SDRAM_BASE;
	priv->info.size = rockchip_sdram_size(
			(phys_addr_t)&priv->grf->os_reg[2]);
#endif

	return 0;
}

static int rk322x_dmc_get_info(struct udevice *dev, struct ram_info *info)
{
	struct dram_info *priv = dev_get_priv(dev);

	*info = priv->info;

	return 0;
}

static struct ram_ops rk322x_dmc_ops = {
	.get_info = rk322x_dmc_get_info,
};

static const struct udevice_id rk322x_dmc_ids[] = {
	{ .compatible = "rockchip,rk3228-dmc" },
	{ }
};

U_BOOT_DRIVER(dmc_rk322x) = {
	.name = "rockchip_rk322x_dmc",
	.id = UCLASS_RAM,
	.of_match = rk322x_dmc_ids,
	.ops = &rk322x_dmc_ops,
#ifdef CONFIG_TPL_BUILD
	.ofdata_to_platdata = rk322x_dmc_ofdata_to_platdata,
#endif
	.probe = rk322x_dmc_probe,
	.priv_auto_alloc_size = sizeof(struct dram_info),
#ifdef CONFIG_TPL_BUILD
	.platdata_auto_alloc_size = sizeof(struct rk322x_sdram_params),
#endif
};