// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2020 Rockchip Electronics Co., Ltd.
 * Copyright (c) 2022 Edgeble AI Technologies Pvt. Ltd.
 */

#include <common.h>
#include <debug_uart.h>
#include <dm.h>
#include <ram.h>
#include <syscon.h>
#include <asm/io.h>
#include <asm/arch-rockchip/clock.h>
#include <asm/arch-rockchip/hardware.h>
#include <asm/arch-rockchip/cru_rv1126.h>
#include <asm/arch-rockchip/grf_rv1126.h>
#include <asm/arch-rockchip/sdram_common.h>
#include <asm/arch-rockchip/sdram_rv1126.h>
#include <linux/delay.h>

/* data training flags */
#define CA_TRAINING		(0x1 << 0)
#define READ_GATE_TRAINING	(0x1 << 1)
#define WRITE_LEVELING		(0x1 << 2)
#define WRITE_TRAINING		(0x1 << 3)
#define READ_TRAINING		(0x1 << 4)
#define FULL_TRAINING		(0xff)

#define SKEW_RX_SIGNAL		(0)
#define SKEW_TX_SIGNAL		(1)
#define SKEW_CA_SIGNAL		(2)

#define DESKEW_MDF_ABS_VAL	(0)
#define DESKEW_MDF_DIFF_VAL	(1)

struct dram_info {
#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
	void __iomem *pctl;
	void __iomem *phy;
	struct rv1126_cru *cru;
	struct msch_regs *msch;
	struct rv1126_ddrgrf *ddrgrf;
	struct rv1126_grf *grf;
	u32 sr_idle;
	u32 pd_idle;
#endif
	struct ram_info info;
	struct rv1126_pmugrf *pmugrf;
};

#if defined(CONFIG_TPL_BUILD) || \
	(!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))

#define GRF_BASE_ADDR		0xfe000000
#define PMU_GRF_BASE_ADDR	0xfe020000
#define DDR_GRF_BASE_ADDR	0xfe030000
#define BUS_SGRF_BASE_ADDR	0xfe0a0000
#define SERVER_MSCH_BASE_ADDR	0xfe800000
#define CRU_BASE_ADDR		0xff490000
#define DDR_PHY_BASE_ADDR	0xff4a0000
#define UPCTL2_BASE_ADDR	0xffa50000

#define SGRF_SOC_CON2		0x8
#define SGRF_SOC_CON12		0x30
#define SGRF_SOC_CON13		0x34

struct dram_info dram_info;

struct rv1126_sdram_params sdram_configs[] = {
# include "sdram-rv1126-ddr3-detect-328.inc"
# include "sdram-rv1126-ddr3-detect-396.inc"
# include "sdram-rv1126-ddr3-detect-528.inc"
# include "sdram-rv1126-ddr3-detect-664.inc"
# include "sdram-rv1126-ddr3-detect-784.inc"
# include "sdram-rv1126-ddr3-detect-924.inc"
# include "sdram-rv1126-ddr3-detect-1056.inc"
};

u32 common_info[] = {
#include "sdram-rv1126-loader_params.inc"
};

#if defined(CONFIG_CMD_DDR_TEST_TOOL)
static struct rw_trn_result rw_trn_result;
#endif

static struct rv1126_fsp_param fsp_param[MAX_IDX];

static u8 lp3_odt_value;

static s8 wrlvl_result[2][4];

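/*
 * Encoding of the ddr_cfg_2_rbc[] entries below, inferred from how
 * calculate_ddrconfig() matches against them: bit [8] = rank - 1,
 * bits [7:5] = cs0_row - 13, bit [3] set for 8-bank parts,
 * bits [2:0] = bw + col - 10 (bit [4] is not consumed there).
 */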
/* DDR configuration 0-9 */
u16 ddr_cfg_2_rbc[] = {
	((0 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 3), /* 0 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 2), /* 1 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 3), /* 2 */
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 1), /* 3 */
	((0 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 4), /* 4 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 1), /* 5 */
	((0 << 8) | (3 << 5) | (1 << 4) | (1 << 3) | 2), /* 6 */
	((0 << 8) | (2 << 5) | (1 << 4) | (1 << 3) | 3), /* 7 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 2), /* 8 */
	((1 << 8) | (2 << 5) | (0 << 4) | (1 << 3) | 2)  /* 9 */
};

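/*
 * Encoding of the ddr4_cfg_2_rbc[] entries below, again inferred from
 * calculate_ddrconfig(): bit [7] = rank - 1, bits [6:4] = cs0_row - 13,
 * bits [2:1] = bw, bit [0] = die bw; bit [3] is only matched in the
 * two-rank, equal-size case.
 */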
/* DDR configuration 10-21 */
u8 ddr4_cfg_2_rbc[] = {
	((0 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 0), /* 10 */
	((1 << 7) | (2 << 4) | (0 << 3) | (2 << 1) | 0), /* 11 */
	((0 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 0), /* 12 */
	((1 << 7) | (3 << 4) | (0 << 3) | (1 << 1) | 0), /* 13 */
	((0 << 7) | (4 << 4) | (0 << 3) | (2 << 1) | 1), /* 14 */
	((1 << 7) | (3 << 4) | (0 << 3) | (2 << 1) | 1), /* 15 */
	((1 << 7) | (4 << 4) | (0 << 3) | (1 << 1) | 1), /* 16 */
	((0 << 7) | (2 << 4) | (1 << 3) | (2 << 1) | 0), /* 17 */
	((0 << 7) | (3 << 4) | (1 << 3) | (1 << 1) | 0), /* 18 */
	((0 << 7) | (3 << 4) | (1 << 3) | (2 << 1) | 1), /* 19 */
	((0 << 7) | (4 << 4) | (1 << 3) | (1 << 1) | 1), /* 20 */
	((1 << 7) | (4 << 4) | (0 << 3) | (0 << 1) | 0)  /* 21 */
};

/* DDR configuration 22-28 */
u16 ddr_cfg_2_rbc_p2[] = {
	((1 << 8) | (3 << 5) | (0 << 4) | (1 << 3) | 0), /* 22 */
	((0 << 8) | (4 << 5) | (0 << 4) | (1 << 3) | 2), /* 23 */
	((1 << 8) | (3 << 5) | (0 << 4) | (0 << 3) | 3), /* 24 */
	((0 << 8) | (3 << 5) | (1 << 4) | (0 << 3) | 3), /* 25 */
	((0 << 8) | (4 << 5) | (1 << 4) | (0 << 3) | 2), /* 26 */
	((1 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 2), /* 27 */
	((0 << 8) | (4 << 5) | (0 << 4) | (0 << 3) | 3)  /* 28 */
};

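/*
 * {DDR4 ddrconfig, equivalent DDR3-style ddrconfig} pairs: once a DDR4
 * configuration is matched, it is converted to the DDR3-style index that
 * shares the same address mapping (see calculate_ddrconfig() and
 * set_ctl_address_map()).
 */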
u8 d4_rbc_2_d3_rbc[][2] = {
	{10, 0},
	{11, 2},
	{12, 23},
	{13, 1},
	{14, 28},
	{15, 24},
	{16, 27},
	{17, 7},
	{18, 6},
	{19, 25},
	{20, 26},
	{21, 3}
};

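/*
 * addrmap[n] holds the nine words that set_ctl_address_map() copies into
 * the uMCTL2 ADDRMAP0..ADDRMAP8 registers for ddrconfig n.
 */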
u32 addrmap[29][9] = {
	{24, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 0 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 1 */
	{23, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x0f080808, 0x00000f0f, 0x3f3f}, /* 2 */
	{22, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f0f, 0x3f3f}, /* 3 */
	{24, 0x000a0a0a, 0x00000000, 0x00000000, 0x00000000, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 4 */
	{6, 0x00070707, 0x00000000, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 5 */
	{7, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 6 */
	{8, 0x00090909, 0x00000000, 0x00000000, 0x00001f00, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x3f3f}, /* 7 */
	{22, 0x001f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f0f, 0x3f3f}, /* 8 */
	{23, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x0f070707, 0x00000f0f, 0x3f3f}, /* 9 */

	{24, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0801}, /* 10 */
	{23, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x0f080808, 0x00000f0f, 0x0801}, /* 11 */
	{24, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x0700}, /* 12 */
	{23, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x0700}, /* 13 */
	{24, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f01}, /* 14 */
	{23, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f01}, /* 15 */
	{23, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x3f00}, /* 16 */
	{8, 0x003f0a0a, 0x01010100, 0x01010101, 0x00001f1f, 0x09090909,
	 0x0f090909, 0x00000f0f, 0x0801}, /* 17 */
	{7, 0x003f0909, 0x00000007, 0x1f000000, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x0700}, /* 18 */
	{7, 0x003f0909, 0x01010100, 0x01010101, 0x00001f1f, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f01}, /* 19 */

	{6, 0x003f0808, 0x00000007, 0x1f000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f00}, /* 20 */
	{23, 0x003f0909, 0x00000006, 0x1f1f0000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x0600}, /* 21 */
	{21, 0x00060606, 0x00000000, 0x1f1f0000, 0x00001f1f, 0x05050505,
	 0x05050505, 0x00000f0f, 0x3f3f}, /* 22 */

	{24, 0x00080808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 23 */
	{23, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
	 0x07070707, 0x00000f0f, 0x3f3f}, /* 24 */
	{7, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x08080808,
	 0x08080808, 0x00000f0f, 0x3f3f}, /* 25 */
	{6, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f}, /* 26 */
	{23, 0x003f0808, 0x00000000, 0x00000000, 0x00001f1f, 0x06060606,
	 0x06060606, 0x00000f06, 0x3f3f}, /* 27 */
	{24, 0x003f0909, 0x00000000, 0x00000000, 0x00001f00, 0x07070707,
	 0x07070707, 0x00000f07, 0x3f3f} /* 28 */
};

static u8 dq_sel[22][3] = {
	{0x0, 0x17, 0x22},
	{0x1, 0x18, 0x23},
	{0x2, 0x19, 0x24},
	{0x3, 0x1a, 0x25},
	{0x4, 0x1b, 0x26},
	{0x5, 0x1c, 0x27},
	{0x6, 0x1d, 0x28},
	{0x7, 0x1e, 0x29},
	{0x8, 0x16, 0x21},
	{0x9, 0x1f, 0x2a},
	{0xa, 0x20, 0x2b},
	{0x10, 0x1, 0xc},
	{0x11, 0x2, 0xd},
	{0x12, 0x3, 0xe},
	{0x13, 0x4, 0xf},
	{0x14, 0x5, 0x10},
	{0x15, 0x6, 0x11},
	{0x16, 0x7, 0x12},
	{0x17, 0x8, 0x13},
	{0x18, 0x0, 0xb},
	{0x19, 0x9, 0x14},
	{0x1a, 0xa, 0x15}
};

static u16 grp_addr[4] = {
	ADD_GROUP_CS0_A,
	ADD_GROUP_CS0_B,
	ADD_GROUP_CS1_A,
	ADD_GROUP_CS1_B
};

static u8 wrlvl_result_offset[2][4] = {
	{0xa0 + 0x26, 0xa0 + 0x27, 0xd0 + 0x26, 0xd0 + 0x27},
	{0xa0 + 0x28, 0xa0 + 0x29, 0xd0 + 0x28, 0xd0 + 0x29},
};

static u16 dqs_dq_skew_adr[16] = {
	0x170 + 0,	/* SKEW_UPDATE_RX_CS0_DQS0 */
	0x170 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS1 */
	0x1d0 + 0,	/* SKEW_UPDATE_RX_CS0_DQS2 */
	0x1d0 + 0xb,	/* SKEW_UPDATE_RX_CS0_DQS3 */
	0x1a0 + 0,	/* SKEW_UPDATE_RX_CS1_DQS0 */
	0x1a0 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS1 */
	0x200 + 0,	/* SKEW_UPDATE_RX_CS1_DQS2 */
	0x200 + 0xb,	/* SKEW_UPDATE_RX_CS1_DQS3 */
	0x170 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS0 */
	0x170 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS1 */
	0x1d0 + 0x16,	/* SKEW_UPDATE_TX_CS0_DQS2 */
	0x1d0 + 0x21,	/* SKEW_UPDATE_TX_CS0_DQS3 */
	0x1a0 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS0 */
	0x1a0 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS1 */
	0x200 + 0x16,	/* SKEW_UPDATE_TX_CS1_DQS2 */
	0x200 + 0x21,	/* SKEW_UPDATE_TX_CS1_DQS3 */
};

static void rkclk_ddr_reset(struct dram_info *dram,
			    u32 ctl_srstn, u32 ctl_psrstn,
			    u32 phy_srstn, u32 phy_psrstn)
{
	writel(UPCTL2_SRSTN_REQ(ctl_srstn) | UPCTL2_PSRSTN_REQ(ctl_psrstn) |
	       UPCTL2_ASRSTN_REQ(ctl_srstn),
	       BUS_SGRF_BASE_ADDR + SGRF_SOC_CON13);

	writel(DDRPHY_SRSTN_REQ(phy_srstn) | DDRPHY_PSRSTN_REQ(phy_psrstn),
	       &dram->cru->softrst_con[12]);
}

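/*
 * Set the DPLL to hz. From the divider math below, the output is
 * 24 MHz * fbdiv / (refdiv * postdiv1 * postdiv2); the postdiv ladder
 * presumably keeps the VCO in its legal range. Spread spectrum (ssmod)
 * is enabled when the loader parameters request it.
 */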
static void rkclk_set_dpll(struct dram_info *dram, unsigned int hz)
{
	unsigned int refdiv, postdiv1, postdiv2, fbdiv;
	int delay = 1000;
	u32 mhz = hz / MHz;
	struct global_info *gbl_info;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	u32 ssmod_info;
	u32 dsmpd = 1;

	gbl_info = (struct global_info *)((void *)common_info +
		    index->global_index.offset * 4);
	ssmod_info = gbl_info->info_2t;
	refdiv = 1;
	if (mhz <= 100) {
		postdiv1 = 6;
		postdiv2 = 4;
	} else if (mhz <= 150) {
		postdiv1 = 4;
		postdiv2 = 4;
	} else if (mhz <= 200) {
		postdiv1 = 6;
		postdiv2 = 2;
	} else if (mhz <= 300) {
		postdiv1 = 4;
		postdiv2 = 2;
	} else if (mhz <= 400) {
		postdiv1 = 6;
		postdiv2 = 1;
	} else {
		postdiv1 = 4;
		postdiv2 = 1;
	}
	fbdiv = (mhz * refdiv * postdiv1 * postdiv2) / 24;

	writel(DPLL_MODE(CLOCK_FROM_XIN_OSC), &dram->cru->mode);

	writel(0x1f000000, &dram->cru->clksel_con[64]);
	writel(POSTDIV1(postdiv1) | FBDIV(fbdiv), &dram->cru->pll[1].con0);
	/* enable ssmod */
	if (PLL_SSMOD_SPREAD(ssmod_info)) {
		dsmpd = 0;
		clrsetbits_le32(&dram->cru->pll[1].con2,
				0xffffff << 0, 0x0 << 0);
		writel(SSMOD_SPREAD(PLL_SSMOD_SPREAD(ssmod_info)) |
		       SSMOD_DIVVAL(PLL_SSMOD_DIV(ssmod_info)) |
		       SSMOD_DOWNSPREAD(PLL_SSMOD_DOWNSPREAD(ssmod_info)) |
		       SSMOD_RESET(0) |
		       SSMOD_DIS_SSCG(0) |
		       SSMOD_BP(0),
		       &dram->cru->pll[1].con3);
	}
	writel(DSMPD(dsmpd) | POSTDIV2(postdiv2) | REFDIV(refdiv),
	       &dram->cru->pll[1].con1);

	while (delay > 0) {
		udelay(1);
		if (LOCK(readl(&dram->cru->pll[1].con1)))
			break;
		delay--;
	}

	writel(DPLL_MODE(CLOCK_FROM_PLL), &dram->cru->mode);
}

static void rkclk_configure_ddr(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	/* the Inno DDR PHY needs freq / 2 */
	rkclk_set_dpll(dram, sdram_params->base.ddr_freq * MHZ / 2);
}

static unsigned int
	calculate_ddrconfig(struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	u32 cs, bw, die_bw, col, row, bank;
	u32 cs1_row;
	u32 i, tmp;
	u32 ddrconf = -1;
	u32 row_3_4;

	cs = cap_info->rank;
	bw = cap_info->bw;
	die_bw = cap_info->dbw;
	col = cap_info->col;
	row = cap_info->cs0_row;
	cs1_row = cap_info->cs1_row;
	bank = cap_info->bk;
	row_3_4 = cap_info->row_3_4;

	if (sdram_params->base.dramtype == DDR4) {
		if (cs == 2 && row == cs1_row && !row_3_4) {
			tmp = ((row - 13) << 4) | (1 << 3) | (bw << 1) |
			      die_bw;
			for (i = 17; i < 21; i++) {
				if (((tmp & 0xf) ==
				     (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
				    ((tmp & 0x70) <=
				     (ddr4_cfg_2_rbc[i - 10] & 0x70))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 7) | ((row - 13) << 4) | (bw << 1) | die_bw;
		for (i = 10; i < 21; i++) {
			if (((tmp & 0xf) == (ddr4_cfg_2_rbc[i - 10] & 0xf)) &&
			    ((tmp & 0x70) <= (ddr4_cfg_2_rbc[i - 10] & 0x70)) &&
			    ((tmp & 0x80) <= (ddr4_cfg_2_rbc[i - 10] & 0x80))) {
				ddrconf = i;
				goto out;
			}
		}
	} else {
		if (cs == 2 && row == cs1_row && bank == 3) {
			for (i = 5; i < 8; i++) {
				if (((bw + col - 10) == (ddr_cfg_2_rbc[i] &
							 0x7)) &&
				    ((row - 13) << 5) <= (ddr_cfg_2_rbc[i] &
							  (0x7 << 5))) {
					ddrconf = i;
					goto out;
				}
			}
		}

		tmp = ((cs - 1) << 8) | ((row - 13) << 5) |
		      ((bw + col - 10) << 0);
		if (bank == 3)
			tmp |= (1 << 3);

		for (i = 0; i < 9; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc[i] & (1 << 8)))) {
				ddrconf = i;
				goto out;
			}

		for (i = 0; i < 7; i++)
			if (((tmp & 0x1f) == (ddr_cfg_2_rbc_p2[i] & 0x1f)) &&
			    ((tmp & (7 << 5)) <=
			     (ddr_cfg_2_rbc_p2[i] & (7 << 5))) &&
			    ((tmp & (1 << 8)) <=
			     (ddr_cfg_2_rbc_p2[i] & (1 << 8)))) {
				ddrconf = i + 22;
				goto out;
			}

		if (cs == 1 && bank == 3 && row <= 17 &&
		    (col + bw) == 12)
			ddrconf = 23;
	}

out:
	if (ddrconf > 28)
		printascii("calculate ddrconfig error\n");

	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][0]) {
				if (ddrconf == 21 && row > 16)
					printascii("warn:ddrconf21 row > 16\n");
				else
					ddrconf = d4_rbc_2_d3_rbc[i][1];
				break;
			}
		}
	}

	return ddrconf;
}

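/*
 * uMCTL2 "quasi-dynamic" register programming handshake: clear
 * SWCTL.sw_done before writing such registers, then set it again and poll
 * SWSTAT.sw_done_ack. sw_set_req() and sw_set_ack() below are the two
 * halves of that handshake.
 */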
static void sw_set_req(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* clear sw_done (sw_done = 0) */
	writel(PCTL2_SW_DONE_CLEAR, pctl_base + DDR_PCTL2_SWCTL);
}

static void sw_set_ack(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	/* set sw_done = 1 */
	writel(PCTL2_SW_DONE, pctl_base + DDR_PCTL2_SWCTL);
	while (1) {
		/* wait for programming to complete */
		if (readl(pctl_base + DDR_PCTL2_SWSTAT) &
		    PCTL2_SW_DONE_ACK)
			break;
	}
}

static void set_ctl_address_map(struct dram_info *dram,
				struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *pctl_base = dram->pctl;
	u32 ddrconf = cap_info->ddrconfig;
	u32 i, row;

	row = cap_info->cs0_row;
	if (sdram_params->base.dramtype == DDR4) {
		for (i = 0; i < ARRAY_SIZE(d4_rbc_2_d3_rbc); i++) {
			if (ddrconf == d4_rbc_2_d3_rbc[i][1]) {
				ddrconf = d4_rbc_2_d3_rbc[i][0];
				break;
			}
		}
	}

	if (ddrconf >= ARRAY_SIZE(addrmap)) {
		printascii("set ctl address map fail\n");
		return;
	}

	sdram_copy_to_reg((u32 *)(pctl_base + DDR_PCTL2_ADDRMAP0),
			  &addrmap[ddrconf][0], ARRAY_SIZE(addrmap[ddrconf]) * 4);

	/* set unused row bits to 0xf */
	for (i = 17; i >= row; i--)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6 +
			     ((i - 12) * 8 / 32) * 4,
			     0xf << ((i - 12) * 8 % 32));

	if (sdram_params->base.dramtype == LPDDR3 && cap_info->row_3_4)
		setbits_le32(pctl_base + DDR_PCTL2_ADDRMAP6, 1 << 31);
	if (sdram_params->base.dramtype == DDR4 && cap_info->bw == 0x1)
		setbits_le32(pctl_base + DDR_PCTL2_PCCFG, 1 << 8);

	if (cap_info->rank == 1)
		clrsetbits_le32(pctl_base + DDR_PCTL2_ADDRMAP0, 0x1f, 0x1f);
}

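/*
 * Configure the PHY PLL for the target frequency: with wait == 0 the
 * divider registers are programmed, with wait == 1 power-down is released
 * and the function polls for PLL lock.
 */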
static void phy_pll_set(struct dram_info *dram, u32 freq, u32 wait)
{
	void __iomem *phy_base = dram->phy;
	u32 fbdiv, prediv, postdiv, postdiv_en;

	if (wait) {
		clrbits_le32(PHY_REG(phy_base, 0x53), PHY_PD_DISB);
		while (!(readl(PHY_REG(phy_base, 0x90)) & PHY_PLL_LOCK))
			continue;
	} else {
		freq /= MHz;
		prediv = 1;
		if (freq <= 200) {
			fbdiv = 16;
			postdiv = 2;
			postdiv_en = 1;
		} else if (freq <= 456) {
			fbdiv = 8;
			postdiv = 1;
			postdiv_en = 1;
		} else {
			fbdiv = 4;
			postdiv = 0;
			postdiv_en = 0;
		}
		writel(fbdiv & 0xff, PHY_REG(phy_base, 0x50));
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_PBDIV_BIT9_MASK,
				(fbdiv >> 8) & 1);
		clrsetbits_le32(PHY_REG(phy_base, 0x51), PHY_POSTDIV_EN_MASK,
				postdiv_en << PHY_POSTDIV_EN_SHIFT);

		clrsetbits_le32(PHY_REG(phy_base, 0x52),
				PHY_PREDIV_MASK << PHY_PREDIV_SHIFT, prediv);
		clrsetbits_le32(PHY_REG(phy_base, 0x53),
				PHY_POSTDIV_MASK << PHY_POSTDIV_SHIFT,
				postdiv << PHY_POSTDIV_SHIFT);
	}
}

static const u16 d3_phy_drv_2_ohm[][2] = {
	{PHY_DDR3_RON_455ohm, 455},
	{PHY_DDR3_RON_230ohm, 230},
	{PHY_DDR3_RON_153ohm, 153},
	{PHY_DDR3_RON_115ohm, 115},
	{PHY_DDR3_RON_91ohm, 91},
	{PHY_DDR3_RON_76ohm, 76},
	{PHY_DDR3_RON_65ohm, 65},
	{PHY_DDR3_RON_57ohm, 57},
	{PHY_DDR3_RON_51ohm, 51},
	{PHY_DDR3_RON_46ohm, 46},
	{PHY_DDR3_RON_41ohm, 41},
	{PHY_DDR3_RON_38ohm, 38},
	{PHY_DDR3_RON_35ohm, 35},
	{PHY_DDR3_RON_32ohm, 32},
	{PHY_DDR3_RON_30ohm, 30},
	{PHY_DDR3_RON_28ohm, 28},
	{PHY_DDR3_RON_27ohm, 27},
	{PHY_DDR3_RON_25ohm, 25},
	{PHY_DDR3_RON_24ohm, 24},
	{PHY_DDR3_RON_23ohm, 23},
	{PHY_DDR3_RON_22ohm, 22},
	{PHY_DDR3_RON_21ohm, 21},
	{PHY_DDR3_RON_20ohm, 20}
};

static u16 d3_phy_odt_2_ohm[][2] = {
	{PHY_DDR3_RTT_DISABLE, 0},
	{PHY_DDR3_RTT_561ohm, 561},
	{PHY_DDR3_RTT_282ohm, 282},
	{PHY_DDR3_RTT_188ohm, 188},
	{PHY_DDR3_RTT_141ohm, 141},
	{PHY_DDR3_RTT_113ohm, 113},
	{PHY_DDR3_RTT_94ohm, 94},
	{PHY_DDR3_RTT_81ohm, 81},
	{PHY_DDR3_RTT_72ohm, 72},
	{PHY_DDR3_RTT_64ohm, 64},
	{PHY_DDR3_RTT_58ohm, 58},
	{PHY_DDR3_RTT_52ohm, 52},
	{PHY_DDR3_RTT_48ohm, 48},
	{PHY_DDR3_RTT_44ohm, 44},
	{PHY_DDR3_RTT_41ohm, 41},
	{PHY_DDR3_RTT_38ohm, 38},
	{PHY_DDR3_RTT_37ohm, 37},
	{PHY_DDR3_RTT_34ohm, 34},
	{PHY_DDR3_RTT_32ohm, 32},
	{PHY_DDR3_RTT_31ohm, 31},
	{PHY_DDR3_RTT_29ohm, 29},
	{PHY_DDR3_RTT_28ohm, 28},
	{PHY_DDR3_RTT_27ohm, 27},
	{PHY_DDR3_RTT_25ohm, 25}
};

static u16 d4lp3_phy_drv_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RON_482ohm, 482},
	{PHY_DDR4_LPDDR3_RON_244ohm, 244},
	{PHY_DDR4_LPDDR3_RON_162ohm, 162},
	{PHY_DDR4_LPDDR3_RON_122ohm, 122},
	{PHY_DDR4_LPDDR3_RON_97ohm, 97},
	{PHY_DDR4_LPDDR3_RON_81ohm, 81},
	{PHY_DDR4_LPDDR3_RON_69ohm, 69},
	{PHY_DDR4_LPDDR3_RON_61ohm, 61},
	{PHY_DDR4_LPDDR3_RON_54ohm, 54},
	{PHY_DDR4_LPDDR3_RON_48ohm, 48},
	{PHY_DDR4_LPDDR3_RON_44ohm, 44},
	{PHY_DDR4_LPDDR3_RON_40ohm, 40},
	{PHY_DDR4_LPDDR3_RON_37ohm, 37},
	{PHY_DDR4_LPDDR3_RON_34ohm, 34},
	{PHY_DDR4_LPDDR3_RON_32ohm, 32},
	{PHY_DDR4_LPDDR3_RON_30ohm, 30},
	{PHY_DDR4_LPDDR3_RON_28ohm, 28},
	{PHY_DDR4_LPDDR3_RON_27ohm, 27},
	{PHY_DDR4_LPDDR3_RON_25ohm, 25},
	{PHY_DDR4_LPDDR3_RON_24ohm, 24},
	{PHY_DDR4_LPDDR3_RON_23ohm, 23},
	{PHY_DDR4_LPDDR3_RON_22ohm, 22},
	{PHY_DDR4_LPDDR3_RON_21ohm, 21}
};

static u16 d4lp3_phy_odt_2_ohm[][2] = {
	{PHY_DDR4_LPDDR3_RTT_DISABLE, 0},
	{PHY_DDR4_LPDDR3_RTT_586ohm, 586},
	{PHY_DDR4_LPDDR3_RTT_294ohm, 294},
	{PHY_DDR4_LPDDR3_RTT_196ohm, 196},
	{PHY_DDR4_LPDDR3_RTT_148ohm, 148},
	{PHY_DDR4_LPDDR3_RTT_118ohm, 118},
	{PHY_DDR4_LPDDR3_RTT_99ohm, 99},
	{PHY_DDR4_LPDDR3_RTT_85ohm, 85},
	{PHY_DDR4_LPDDR3_RTT_76ohm, 76},
	{PHY_DDR4_LPDDR3_RTT_67ohm, 67},
	{PHY_DDR4_LPDDR3_RTT_60ohm, 60},
	{PHY_DDR4_LPDDR3_RTT_55ohm, 55},
	{PHY_DDR4_LPDDR3_RTT_50ohm, 50},
	{PHY_DDR4_LPDDR3_RTT_46ohm, 46},
	{PHY_DDR4_LPDDR3_RTT_43ohm, 43},
	{PHY_DDR4_LPDDR3_RTT_40ohm, 40},
	{PHY_DDR4_LPDDR3_RTT_38ohm, 38},
	{PHY_DDR4_LPDDR3_RTT_36ohm, 36},
	{PHY_DDR4_LPDDR3_RTT_34ohm, 34},
	{PHY_DDR4_LPDDR3_RTT_32ohm, 32},
	{PHY_DDR4_LPDDR3_RTT_31ohm, 31},
	{PHY_DDR4_LPDDR3_RTT_29ohm, 29},
	{PHY_DDR4_LPDDR3_RTT_28ohm, 28},
	{PHY_DDR4_LPDDR3_RTT_27ohm, 27}
};

static u16 lp4_phy_drv_2_ohm[][2] = {
	{PHY_LPDDR4_RON_501ohm, 501},
	{PHY_LPDDR4_RON_253ohm, 253},
	{PHY_LPDDR4_RON_168ohm, 168},
	{PHY_LPDDR4_RON_126ohm, 126},
	{PHY_LPDDR4_RON_101ohm, 101},
	{PHY_LPDDR4_RON_84ohm, 84},
	{PHY_LPDDR4_RON_72ohm, 72},
	{PHY_LPDDR4_RON_63ohm, 63},
	{PHY_LPDDR4_RON_56ohm, 56},
	{PHY_LPDDR4_RON_50ohm, 50},
	{PHY_LPDDR4_RON_46ohm, 46},
	{PHY_LPDDR4_RON_42ohm, 42},
	{PHY_LPDDR4_RON_38ohm, 38},
	{PHY_LPDDR4_RON_36ohm, 36},
	{PHY_LPDDR4_RON_33ohm, 33},
	{PHY_LPDDR4_RON_31ohm, 31},
	{PHY_LPDDR4_RON_29ohm, 29},
	{PHY_LPDDR4_RON_28ohm, 28},
	{PHY_LPDDR4_RON_26ohm, 26},
	{PHY_LPDDR4_RON_25ohm, 25},
	{PHY_LPDDR4_RON_24ohm, 24},
	{PHY_LPDDR4_RON_23ohm, 23},
	{PHY_LPDDR4_RON_22ohm, 22}
};

static u16 lp4_phy_odt_2_ohm[][2] = {
	{PHY_LPDDR4_RTT_DISABLE, 0},
	{PHY_LPDDR4_RTT_604ohm, 604},
	{PHY_LPDDR4_RTT_303ohm, 303},
	{PHY_LPDDR4_RTT_202ohm, 202},
	{PHY_LPDDR4_RTT_152ohm, 152},
	{PHY_LPDDR4_RTT_122ohm, 122},
	{PHY_LPDDR4_RTT_101ohm, 101},
	{PHY_LPDDR4_RTT_87ohm, 87},
	{PHY_LPDDR4_RTT_78ohm, 78},
	{PHY_LPDDR4_RTT_69ohm, 69},
	{PHY_LPDDR4_RTT_62ohm, 62},
	{PHY_LPDDR4_RTT_56ohm, 56},
	{PHY_LPDDR4_RTT_52ohm, 52},
	{PHY_LPDDR4_RTT_48ohm, 48},
	{PHY_LPDDR4_RTT_44ohm, 44},
	{PHY_LPDDR4_RTT_41ohm, 41},
	{PHY_LPDDR4_RTT_39ohm, 39},
	{PHY_LPDDR4_RTT_37ohm, 37},
	{PHY_LPDDR4_RTT_35ohm, 35},
	{PHY_LPDDR4_RTT_33ohm, 33},
	{PHY_LPDDR4_RTT_32ohm, 32},
	{PHY_LPDDR4_RTT_30ohm, 30},
	{PHY_LPDDR4_RTT_29ohm, 29},
	{PHY_LPDDR4_RTT_27ohm, 27}
};

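/*
 * Map a resistance in ohms to the LPDDR4 mode-register ODT code: the
 * smallest supported value >= odt_ohm is chosen (240 ohm for anything
 * above 120), and 0 disables ODT.
 */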
static u32 lp4_odt_calc(u32 odt_ohm)
{
	u32 odt;

	if (odt_ohm == 0)
		odt = LPDDR4_DQODT_DIS;
	else if (odt_ohm <= 40)
		odt = LPDDR4_DQODT_40;
	else if (odt_ohm <= 48)
		odt = LPDDR4_DQODT_48;
	else if (odt_ohm <= 60)
		odt = LPDDR4_DQODT_60;
	else if (odt_ohm <= 80)
		odt = LPDDR4_DQODT_80;
	else if (odt_ohm <= 120)
		odt = LPDDR4_DQODT_120;
	else
		odt = LPDDR4_DQODT_240;

	return odt;
}

static void *get_ddr_drv_odt_info(u32 dramtype)
{
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	void *ddr_info = NULL;

	if (dramtype == DDR4)
		ddr_info = (void *)common_info + index->ddr4_index.offset * 4;
	else if (dramtype == DDR3)
		ddr_info = (void *)common_info + index->ddr3_index.offset * 4;
	else if (dramtype == LPDDR3)
		ddr_info = (void *)common_info + index->lp3_index.offset * 4;
	else if (dramtype == LPDDR4)
		ddr_info = (void *)common_info + index->lp4_index.offset * 4;
	else
		printascii("unsupported dram type\n");
	return ddr_info;
}

static void set_lp4_vref(struct dram_info *dram, struct lp4_info *lp4_info,
			 u32 freq_mhz, u32 dst_fsp, u32 dramtype)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ca_vref, dq_vref;

	if (freq_mhz <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odtoff);
	else
		ca_vref = LP4_CA_VREF(lp4_info->vref_when_odten);

	if (freq_mhz <= LP4_DQ_ODT_EN_FREQ(lp4_info->dq_odten_freq))
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odtoff);
	else
		dq_vref = LP4_DQ_VREF(lp4_info->vref_when_odten);

	if (dramtype == LPDDR4) {
		if (ca_vref < 100)
			ca_vref = 100;
		if (ca_vref > 420)
			ca_vref = 420;

		if (ca_vref <= 300)
			ca_vref = (0 << 6) | (ca_vref - 100) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 220) / 4;

		if (dq_vref < 100)
			dq_vref = 100;
		if (dq_vref > 420)
			dq_vref = 420;

		if (dq_vref <= 300)
			dq_vref = (0 << 6) | (dq_vref - 100) / 4;
		else
			dq_vref = (1 << 6) | (dq_vref - 220) / 4;
	} else {
		ca_vref = ca_vref * 11 / 6;
		if (ca_vref < 150)
			ca_vref = 150;
		if (ca_vref > 629)
			ca_vref = 629;

		if (ca_vref <= 449)
			ca_vref = (0 << 6) | (ca_vref - 150) / 4;
		else
			ca_vref = (1 << 6) | (ca_vref - 329) / 4;

		if (dq_vref < 150)
			dq_vref = 150;
		if (dq_vref > 629)
			dq_vref = 629;

		if (dq_vref <= 449)
			dq_vref = (0 << 6) | (dq_vref - 150) / 6;
		else
			dq_vref = (1 << 6) | (dq_vref - 329) / 6;
	}
	sw_set_req(dram);
	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT6,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR12_SHIFT,
			ca_vref << PCTL2_LPDDR4_MR12_SHIFT);

	clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			DDR_PCTL2_INIT7,
			PCTL2_MR_MASK << PCTL2_LPDDR4_MR14_SHIFT,
			dq_vref << PCTL2_LPDDR4_MR14_SHIFT);
	sw_set_ack(dram);
}

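/*
 * Program the PHY drive strength, ODT and slew rate registers plus the
 * matching DRAM mode registers for dst_fsp, from the drv/odt/vref tables
 * in the loader parameter blob (common_info).
 */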
static void set_ds_odt(struct dram_info *dram,
		       struct rv1126_sdram_params *sdram_params, u32 dst_fsp)
{
	void __iomem *phy_base = dram->phy;
	void __iomem *pctl_base = dram->pctl;
	u32 dramtype = sdram_params->base.dramtype;
	struct ddr2_3_4_lp2_3_info *ddr_info;
	struct lp4_info *lp4_info;
	u32 i, j, tmp;
	const u16 (*p_drv)[2];
	const u16 (*p_odt)[2];
	u32 drv_info, sr_info;
	u32 phy_dq_drv_ohm, phy_clk_drv_ohm, phy_ca_drv_ohm, dram_drv_ohm;
	u32 phy_odt_ohm, dram_odt_ohm;
	u32 lp4_pu_cal, phy_lp4_drv_pd_en;
	u32 phy_odt_up_en, phy_odt_dn_en;
	u32 sr_dq, sr_clk;
	u32 freq = sdram_params->base.ddr_freq;
	u32 mr1_mr3, mr11, mr22, vref_out, vref_inner;
	u32 phy_clk_drv = 0, phy_odt = 0, phy_ca_drv = 0, dram_caodt_ohm = 0;
	u32 phy_dq_drv = 0;
	u32 phy_odt_up = 0, phy_odt_dn = 0;

	ddr_info = get_ddr_drv_odt_info(dramtype);
	lp4_info = (void *)ddr_info;

	if (!ddr_info)
		return;

	/* the dram odt enable freq selects phy drv, dram odt and phy sr */
	if (freq <= DRAMODT_EN_FREQ(ddr_info->odten_freq)) {
		drv_info = ddr_info->drv_when_odtoff;
		dram_odt_ohm = 0;
		sr_info = ddr_info->sr_when_odtoff;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTOFF(lp4_info->odt_info);
	} else {
		drv_info = ddr_info->drv_when_odten;
		dram_odt_ohm = ODT_INFO_DRAM_ODT(ddr_info->odt_info);
		sr_info = ddr_info->sr_when_odten;
		phy_lp4_drv_pd_en =
			PHY_LP4_DRV_PULLDOWN_EN_ODTEN(lp4_info->odt_info);
	}
	phy_dq_drv_ohm =
		DRV_INFO_PHY_DQ_DRV(drv_info);
	phy_clk_drv_ohm =
		DRV_INFO_PHY_CLK_DRV(drv_info);
	phy_ca_drv_ohm =
		DRV_INFO_PHY_CA_DRV(drv_info);

	sr_dq = DQ_SR_INFO(sr_info);
	sr_clk = CLK_SR_INFO(sr_info);

	/* the phy odt enable freq selects dram drv and phy odt */
	if (freq <= PHYODT_EN_FREQ(ddr_info->odten_freq)) {
		dram_drv_ohm = DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odtoff);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTOFF(lp4_info->odt_info);
		phy_odt_ohm = 0;
		phy_odt_up_en = 0;
		phy_odt_dn_en = 0;
	} else {
		dram_drv_ohm =
			DRV_INFO_DRAM_DQ_DRV(ddr_info->drv_when_odten);
		phy_odt_ohm = ODT_INFO_PHY_ODT(ddr_info->odt_info);
		phy_odt_up_en =
			ODT_INFO_PULLUP_EN(ddr_info->odt_info);
		phy_odt_dn_en =
			ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
		lp4_pu_cal = LP4_DRV_PU_CAL_ODTEN(lp4_info->odt_info);
	}

	if (dramtype == LPDDR4) {
		if (phy_odt_ohm) {
			phy_odt_up_en = 0;
			phy_odt_dn_en = 1;
		}
		if (freq <= LP4_CA_ODT_EN_FREQ(lp4_info->ca_odten_freq))
			dram_caodt_ohm = 0;
		else
			dram_caodt_ohm =
				ODT_INFO_LP4_CA_ODT(lp4_info->odt_info);
	}

	if (dramtype == DDR3) {
		p_drv = d3_phy_drv_2_ohm;
		p_odt = d3_phy_odt_2_ohm;
	} else if (dramtype == LPDDR4) {
		p_drv = lp4_phy_drv_2_ohm;
		p_odt = lp4_phy_odt_2_ohm;
	} else {
		p_drv = d4lp3_phy_drv_2_ohm;
		p_odt = d4lp3_phy_odt_2_ohm;
	}

	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_dq_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_dq_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_clk_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_clk_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	for (i = ARRAY_SIZE(d3_phy_drv_2_ohm) - 1; ; i--) {
		if (phy_ca_drv_ohm <= *(*(p_drv + i) + 1)) {
			phy_ca_drv = **(p_drv + i);
			break;
		}
		if (i == 0)
			break;
	}
	if (!phy_odt_ohm)
		phy_odt = 0;
	else
		for (i = ARRAY_SIZE(d4lp3_phy_odt_2_ohm) - 1; ; i--) {
			if (phy_odt_ohm <= *(*(p_odt + i) + 1)) {
				phy_odt = **(p_odt + i);
				break;
			}
			if (i == 0)
				break;
		}

	if (dramtype != LPDDR4) {
		if (!phy_odt_ohm || (phy_odt_up_en && phy_odt_dn_en))
			vref_inner = 0x80;
		else if (phy_odt_up_en)
			vref_inner = (2 * dram_drv_ohm + phy_odt_ohm) * 128 /
				     (dram_drv_ohm + phy_odt_ohm);
		else
			vref_inner = phy_odt_ohm * 128 /
				     (phy_odt_ohm + dram_drv_ohm);

		if (dramtype != DDR3 && dram_odt_ohm)
			vref_out = (2 * phy_dq_drv_ohm + dram_odt_ohm) * 128 /
				   (phy_dq_drv_ohm + dram_odt_ohm);
		else
			vref_out = 0x80;
	} else {
		/* for lp4 and lp4x */
		if (phy_odt_ohm)
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odten) *
				 256) / 1000;
		else
			vref_inner =
				(PHY_LP4_DQ_VREF(lp4_info->vref_when_odtoff) *
				 256) / 1000;

		vref_out = 0x80;
	}

	/* default ZQCALIB bypass mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x100), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x101), 0x1f, phy_ca_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x102), 0x1f, phy_clk_drv);
	clrsetbits_le32(PHY_REG(phy_base, 0x103), 0x1f, phy_clk_drv);
	if (dramtype == LPDDR4) {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_clk_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_clk_drv);
	} else {
		clrsetbits_le32(PHY_REG(phy_base, 0x107), 0x1f, phy_ca_drv);
		clrsetbits_le32(PHY_REG(phy_base, 0x108), 0x1f, phy_ca_drv);
	}
	/* clk / cmd slew rate */
	clrsetbits_le32(PHY_REG(phy_base, 0x106), 0x1f, sr_clk);

	phy_lp4_drv_pd_en = (~phy_lp4_drv_pd_en) & 1;
	if (phy_odt_up_en)
		phy_odt_up = phy_odt;
	if (phy_odt_dn_en)
		phy_odt_dn = phy_odt;

	for (i = 0; i < 4; i++) {
		j = 0x110 + i * 0x10;
		clrsetbits_le32(PHY_REG(phy_base, j + 1), 0x1f, phy_odt_up);
		clrsetbits_le32(PHY_REG(phy_base, j), 0x1f, phy_odt_dn);
		clrsetbits_le32(PHY_REG(phy_base, j + 2), 0x1f, phy_dq_drv);
		clrsetbits_le32(PHY_REG(phy_base, j + 3), 0x1f, phy_dq_drv);
		writel(vref_inner, PHY_REG(phy_base, 0x118 + i * 0x10));

		clrsetbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				1 << 3, phy_lp4_drv_pd_en << 3);
		if (dramtype == LPDDR4)
			clrbits_le32(PHY_REG(phy_base, 0x114 + i * 0x10),
				     BIT(5));
		/* dq slew rate */
		clrsetbits_le32(PHY_REG(phy_base, 0x117 + i * 0x10),
				0x1f, sr_dq);
	}

	/* reg_rx_vref_value_update */
	setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);

	/* RAM VREF */
	writel(vref_out, PHY_REG(phy_base, 0x105));
	if (dramtype == LPDDR3)
		udelay(100);

	if (dramtype == LPDDR4)
		set_lp4_vref(dram, lp4_info, freq, dst_fsp, dramtype);

	if (dramtype == DDR3 || dramtype == DDR4) {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3);
		mr1_mr3 = mr1_mr3 >> PCTL2_DDR34_MR1_SHIFT & PCTL2_MR_MASK;
	} else {
		mr1_mr3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4);
		mr1_mr3 = mr1_mr3 >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK;
	}

	if (dramtype == DDR3) {
		mr1_mr3 &= ~(DDR3_DS_MASK | DDR3_RTT_NOM_MASK);
		if (dram_drv_ohm == 34)
			mr1_mr3 |= DDR3_DS_34;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR3_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR3_RTT_NOM_40;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR3_RTT_NOM_60;
		else
			mr1_mr3 |= DDR3_RTT_NOM_120;

	} else if (dramtype == DDR4) {
		mr1_mr3 &= ~(DDR4_DS_MASK | DDR4_RTT_NOM_MASK);
		if (dram_drv_ohm == 48)
			mr1_mr3 |= DDR4_DS_48;

		if (dram_odt_ohm == 0)
			mr1_mr3 |= DDR4_RTT_NOM_DIS;
		else if (dram_odt_ohm <= 34)
			mr1_mr3 |= DDR4_RTT_NOM_34;
		else if (dram_odt_ohm <= 40)
			mr1_mr3 |= DDR4_RTT_NOM_40;
		else if (dram_odt_ohm <= 48)
			mr1_mr3 |= DDR4_RTT_NOM_48;
		else if (dram_odt_ohm <= 60)
			mr1_mr3 |= DDR4_RTT_NOM_60;
		else
			mr1_mr3 |= DDR4_RTT_NOM_120;

	} else if (dramtype == LPDDR3) {
		if (dram_drv_ohm <= 34)
			mr1_mr3 |= LPDDR3_DS_34;
		else if (dram_drv_ohm <= 40)
			mr1_mr3 |= LPDDR3_DS_40;
		else if (dram_drv_ohm <= 48)
			mr1_mr3 |= LPDDR3_DS_48;
		else if (dram_drv_ohm <= 60)
			mr1_mr3 |= LPDDR3_DS_60;
		else if (dram_drv_ohm <= 80)
			mr1_mr3 |= LPDDR3_DS_80;

		if (dram_odt_ohm == 0)
			lp3_odt_value = LPDDR3_ODT_DIS;
		else if (dram_odt_ohm <= 60)
			lp3_odt_value = LPDDR3_ODT_60;
		else if (dram_odt_ohm <= 120)
			lp3_odt_value = LPDDR3_ODT_120;
		else
			lp3_odt_value = LPDDR3_ODT_240;
	} else { /* for lpddr4 and lpddr4x */
		/* MR3 for lp4 PU-CAL and PDDS */
		mr1_mr3 &= ~(LPDDR4_PDDS_MASK | LPDDR4_PU_CAL_MASK);
		mr1_mr3 |= lp4_pu_cal;

		tmp = lp4_odt_calc(dram_drv_ohm);
		if (!tmp)
			tmp = LPDDR4_PDDS_240;
		mr1_mr3 |= (tmp << LPDDR4_PDDS_SHIFT);

		/* MR11 for lp4 ca odt, dq odt set */
		mr11 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT6);
		mr11 = mr11 >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK;

		mr11 &= ~(LPDDR4_DQODT_MASK | LPDDR4_CAODT_MASK);

		tmp = lp4_odt_calc(dram_odt_ohm);
		mr11 |= (tmp << LPDDR4_DQODT_SHIFT);

		tmp = lp4_odt_calc(dram_caodt_ohm);
		mr11 |= (tmp << LPDDR4_CAODT_SHIFT);
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT6,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR11_SHIFT,
				mr11 << PCTL2_LPDDR4_MR11_SHIFT);
		sw_set_ack(dram);

		/* MR22 for soc odt/odt-ck/odt-cs/odt-ca */
		mr22 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
			     DDR_PCTL2_INIT7);
		mr22 = mr22 >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK;
		mr22 &= ~LPDDR4_SOC_ODT_MASK;

		tmp = lp4_odt_calc(phy_odt_ohm);
		mr22 |= tmp;
		mr22 = mr22 |
		       (LP4_ODTE_CK_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CK_SHIFT) |
		       (LP4_ODTE_CS_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTE_CS_SHIFT) |
		       (LP4_ODTD_CA_EN(lp4_info->cs_drv_ca_odt_info) <<
			LPDDR4_ODTD_CA_SHIFT);

		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT7,
				PCTL2_MR_MASK << PCTL2_LPDDR4_MR22_SHIFT,
				mr22 << PCTL2_LPDDR4_MR22_SHIFT);
		sw_set_ack(dram);
	}

	if (dramtype == DDR4 || dramtype == DDR3) {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT3,
				PCTL2_MR_MASK << PCTL2_DDR34_MR1_SHIFT,
				mr1_mr3 << PCTL2_DDR34_MR1_SHIFT);
		sw_set_ack(dram);
	} else {
		sw_set_req(dram);
		clrsetbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
				DDR_PCTL2_INIT4,
				PCTL2_MR_MASK << PCTL2_LPDDR234_MR3_SHIFT,
				mr1_mr3 << PCTL2_LPDDR234_MR3_SHIFT);
		sw_set_ack(dram);
	}
}

static int sdram_cmd_dq_path_remap(struct dram_info *dram,
				   struct rv1126_sdram_params *sdram_params)
{
	void __iomem *phy_base = dram->phy;
	u32 dramtype = sdram_params->base.dramtype;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);

	if (dramtype <= LPDDR4)
		writel((map_info->byte_map[dramtype / 4] >>
			((dramtype % 4) * 8)) & 0xff,
		       PHY_REG(phy_base, 0x4f));

	return 0;
}

static void phy_cfg(struct dram_info *dram,
		    struct rv1126_sdram_params *sdram_params)
{
	struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
	void __iomem *phy_base = dram->phy;
	u32 i, dq_map, tmp;
	u32 byte1 = 0, byte0 = 0;

	sdram_cmd_dq_path_remap(dram, sdram_params);

	phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 0);
	for (i = 0; sdram_params->phy_regs.phy[i][0] != 0xFFFFFFFF; i++) {
		writel(sdram_params->phy_regs.phy[i][1],
		       phy_base + sdram_params->phy_regs.phy[i][0]);
	}

	clrbits_le32(PHY_REG(phy_base, 0x62), BIT(5));
	dq_map = readl(PHY_REG(phy_base, 0x4f));
	for (i = 0; i < 4; i++) {
		if (((dq_map >> (i * 2)) & 0x3) == 0)
			byte0 = i;
		if (((dq_map >> (i * 2)) & 0x3) == 1)
			byte1 = i;
	}

	tmp = readl(PHY_REG(phy_base, 0xf)) & (~PHY_DQ_WIDTH_MASK);
	if (cap_info->bw == 2)
		tmp |= 0xf;
	else if (cap_info->bw == 1)
		tmp |= ((1 << byte0) | (1 << byte1));
	else
		tmp |= (1 << byte0);

	writel(tmp, PHY_REG(phy_base, 0xf));

	/* lpddr4 odt is controlled by the phy; enable cs0 odt */
	if (sdram_params->base.dramtype == LPDDR4)
		clrsetbits_le32(PHY_REG(phy_base, 0x20), 0x7 << 4,
				(1 << 6) | (1 << 4));
	/* for ca training, ca vref chooses range1 */
	setbits_le32(PHY_REG(phy_base, 0x1e), BIT(6));
	setbits_le32(PHY_REG(phy_base, 0x1f), BIT(6));
	/* for wr training, PHY_0x7c[5] chooses range0 */
	clrbits_le32(PHY_REG(phy_base, 0x7c), BIT(5));
}

static int update_refresh_reg(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 */
int read_mr(struct dram_info *dram, u32 rank, u32 mr_num, u32 dramtype)
{
	u32 ret;
	u32 i, temp;
	u32 dqmap;

	void __iomem *pctl_base = dram->pctl;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);

	if (dramtype == LPDDR2)
		dqmap = map_info->lp2_dq0_7_map;
	else
		dqmap = map_info->lp3_dq0_7_map;

	pctl_read_mr(pctl_base, rank, mr_num);

	ret = (readl(&dram->ddrgrf->ddr_grf_status[0]) & 0xff);

	if (dramtype != LPDDR4) {
		temp = 0;
		for (i = 0; i < 8; i++) {
			temp = temp | (((ret >> i) & 0x1) <<
				       ((dqmap >> (i * 4)) & 0xf));
		}
	} else {
		temp = (readl(&dram->ddrgrf->ddr_grf_status[1]) & 0xff);
	}

	return temp;
}

/* autorefresh must be disabled before calling this function */
void send_a_refresh(struct dram_info *dram)
{
	void __iomem *pctl_base = dram->pctl;

	while (readl(pctl_base + DDR_PCTL2_DBGSTAT) & 0x3)
		continue;
	writel(0x3, pctl_base + DDR_PCTL2_DBGCMD);
}

static void enter_sr(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while (1) {
			if (((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_SELFREF_TYPE_MASK) ==
			     PCTL2_SELFREF_TYPE_SR_NOT_AUTO) &&
			    ((readl(pctl_base + DDR_PCTL2_STAT) &
			      PCTL2_OPERATING_MODE_MASK) ==
			     PCTL2_OPERATING_MODE_SR))
				break;
		}
	} else {
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, PCTL2_SELFREF_SW);
		while ((readl(pctl_base + DDR_PCTL2_STAT) &
			PCTL2_OPERATING_MODE_MASK) == PCTL2_OPERATING_MODE_SR)
			continue;
	}
}

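/*
 * Sample the left/right loop inverter delay for every DQ bit in each byte
 * group and store it in the matching per-bit register; dq_sel[] maps each
 * selector value to its two destination offsets.
 */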
void record_dq_prebit(struct dram_info *dram)
{
	u32 group, i, tmp;
	void __iomem *phy_base = dram->phy;

	for (group = 0; group < 4; group++) {
		for (i = 0; i < ARRAY_SIZE(dq_sel); i++) {
			/* l_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2c));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2e));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][1]));

			/* r_loop_invdelaysel */
			writel(dq_sel[i][0], PHY_REG(phy_base,
						     grp_addr[group] + 0x2d));
			tmp = readl(PHY_REG(phy_base, grp_addr[group] + 0x2f));
			writel(tmp, PHY_REG(phy_base,
					    grp_addr[group] + dq_sel[i][2]));
		}
	}
}

static void update_dq_rx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrsetbits_le32(PHY_REG(phy_base, 0x70), BIT(1) | BIT(6) | BIT(4),
			BIT(4));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(4));
}

static void update_dq_tx_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
	setbits_le32(PHY_REG(phy_base, 0x2), BIT(3));
	setbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(6));
}

static void update_ca_prebit(struct dram_info *dram)
{
	void __iomem *phy_base = dram->phy;

	clrbits_le32(PHY_REG(phy_base, 0x25), BIT(2));
	setbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
	udelay(1);
	clrbits_le32(PHY_REG(phy_base, 0x22), BIT(6));
}

/*
 * dir: 0: de-skew = delta_*
 *      1: de-skew = reg val + delta_*
 * delta_dif: value for differential signal: clk
 * delta_sig: value for single signal: ca/cmd
 */
static void modify_ca_deskew(struct dram_info *dram, u32 dir, int delta_dif,
			     int delta_sig, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 i, cs_en, tmp;
	u32 dfi_lp_stat = 0;

	if (cs == 0)
		cs_en = 1;
	else if (cs == 2)
		cs_en = 2;
	else
		cs_en = 3;

	if (dramtype == LPDDR4 &&
	    ((readl(PHY_REG(phy_base, 0x60)) & BIT(5)) == 0)) {
		dfi_lp_stat = 1;
		setbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
	}
	enter_sr(dram, 1);

	for (i = 0; i < 0x20; i++) {
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_sig;
		else
			tmp = readl(PHY_REG(phy_base, 0x150 + i)) +
			      delta_sig;
		writel(tmp, PHY_REG(phy_base, 0x150 + i));
	}

	if (dir == DESKEW_MDF_ABS_VAL)
		tmp = delta_dif;
	else
		tmp = readl(PHY_REG(phy_base, 0x150 + 0x17)) -
		      delta_sig + delta_dif;
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x17));
	writel(tmp, PHY_REG(phy_base, 0x150 + 0x18));
	if (dramtype == LPDDR4) {
		writel(tmp, PHY_REG(phy_base, 0x150 + 0x4));
		writel(tmp, PHY_REG(phy_base, 0x150 + 0xa));

		clrbits_le32(PHY_REG(phy_base, 0x10), cs_en << 6);
		update_ca_prebit(dram);
	}
	enter_sr(dram, 0);

	if (dfi_lp_stat)
		clrbits_le32(PHY_REG(phy_base, 0x60), BIT(5));
}

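/*
 * Return the smallest de-skew value currently programmed for the given
 * signal class: all CA lines, or the RX/TX DQ/DQS registers of the
 * enabled bytes across "rank" chip selects.
 */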
static u32 get_min_value(struct dram_info *dram, u32 signal, u32 rank)
{
	u32 i, j, offset = 0;
	u32 min = 0x3f;
	void __iomem *phy_base = dram->phy;
	u32 byte_en;

	if (signal == SKEW_TX_SIGNAL)
		offset = 8;

	if (signal == SKEW_CA_SIGNAL) {
		for (i = 0; i < 0x20; i++)
			min = MIN(min, readl(PHY_REG(phy_base, 0x150 + i)));
	} else {
		byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;
		for (j = offset; j < offset + rank * 4; j++) {
			if (!((byte_en >> (j % 4)) & 1))
				continue;
			for (i = 0; i < 11; i++)
				min = MIN(min,
					  readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] +
							i)));
		}
	}

	return min;
}

static u32 low_power_update(struct dram_info *dram, u32 en)
{
	void __iomem *pctl_base = dram->pctl;
	u32 lp_stat = 0;

	if (en) {
		setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, en & 0xf);
	} else {
		lp_stat = readl(pctl_base + DDR_PCTL2_PWRCTL) & 0xf;
		clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 0xf);
	}

	return lp_stat;
}

/*
 * signal: SKEW_RX_SIGNAL or SKEW_TX_SIGNAL
 * dir: 0: de-skew = delta_*
 *      1: de-skew = reg val + delta_*
 * delta_dif: value for differential signal: dqs
 * delta_sig: value for single signal: dq/dm
 */
static void modify_dq_deskew(struct dram_info *dram, u32 signal, u32 dir,
			     int delta_dif, int delta_sig, u32 rank)
{
	void __iomem *phy_base = dram->phy;
	u32 i, j, tmp, offset;
	u32 byte_en;

	byte_en = readl(PHY_REG(phy_base, 0xf)) & 0xf;

	if (signal == SKEW_RX_SIGNAL)
		offset = 0;
	else
		offset = 8;

	for (j = offset; j < (offset + rank * 4); j++) {
		if (!((byte_en >> (j % 4)) & 1))
			continue;
		for (i = 0; i < 0x9; i++) {
			if (dir == DESKEW_MDF_ABS_VAL)
				tmp = delta_sig;
			else
				tmp = delta_sig + readl(PHY_REG(phy_base,
								dqs_dq_skew_adr[j] +
								i));
			writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + i));
		}
		if (dir == DESKEW_MDF_ABS_VAL)
			tmp = delta_dif;
		else
			tmp = delta_dif + readl(PHY_REG(phy_base,
							dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 9));
		writel(tmp, PHY_REG(phy_base, dqs_dq_skew_adr[j] + 0xa));
	}
	if (signal == SKEW_RX_SIGNAL)
		update_dq_rx_prebit(dram);
	else
		update_dq_tx_prebit(dram);
}

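/*
 * Hardware read-gate training for one chip select; non-LPDDR4 types get a
 * fixed RTT while training runs. Returns 0 on success; a non-zero value
 * appears to be a mask of byte lanes that failed (PHY status 0x91 XORed
 * against the enabled-byte mask).
 */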
static int data_training_rg(struct dram_info *dram, u32 cs, u32 dramtype)
{
	void __iomem *phy_base = dram->phy;
	u32 ret;
	u32 dis_auto_zq = 0;
	u32 odt_val_up, odt_val_dn;
	u32 i, j;

	odt_val_dn = readl(PHY_REG(phy_base, 0x110));
	odt_val_up = readl(PHY_REG(phy_base, 0x111));

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(PHY_DDR4_LPDDR3_RTT_294ohm,
			       PHY_REG(phy_base, j));
			writel(PHY_DDR4_LPDDR3_RTT_DISABLE,
			       PHY_REG(phy_base, j + 0x1));
		}
	}
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
	/* use normal read mode for data training */
	clrbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	if (dramtype == DDR4)
		setbits_le32(PHY_REG(phy_base, 0xc), BIT(1));

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs));
	/* enable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 1);
	udelay(50);
	ret = readl(PHY_REG(phy_base, 0x91));
	/* disable gate training */
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x33, (0x20 >> cs) | 0);
	clrbits_le32(PHY_REG(phy_base, 2), 0x30);
	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	ret = (ret & 0x2f) ^ (readl(PHY_REG(phy_base, 0xf)) & 0xf);

	if (dramtype != LPDDR4) {
		for (i = 0; i < 4; i++) {
			j = 0x110 + i * 0x10;
			writel(odt_val_dn, PHY_REG(phy_base, j));
			writel(odt_val_up, PHY_REG(phy_base, j + 0x1));
		}
	}
	return ret;
}

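/*
 * Write leveling for one chip select. On dual-rank DDR3/DDR4 the other
 * rank's output is masked via MR1 while leveling runs; hangs on timeout.
 */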
static int data_training_wl(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 rank)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 dis_auto_zq = 0;
	u32 tmp;
	u32 cur_fsp;
	u32 timeout_us = 1000;

	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	clrbits_le32(PHY_REG(phy_base, 0x7a), 0x1);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_INIT3) &
	      0xffff;
	writel(tmp & 0xff, PHY_REG(phy_base, 0x3));

	/* disable the other cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp | (1 << 12),
			      dramtype);
	if (dramtype == DDR3 || dramtype == DDR4)
		writel(0x40 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));
	else
		writel(0x80 | ((tmp >> 8) & 0x3f), PHY_REG(phy_base, 0x4));

	/* choose cs */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	/* enable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (1 << 2));

	while (1) {
		if ((readl(PHY_REG(phy_base, 0x92)) & 0xf) ==
		    (readl(PHY_REG(phy_base, 0xf)) & 0xf))
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: write leveling timeout\n");
			while (1)
				;
		}
	}

	/* disable write leveling */
	clrsetbits_le32(PHY_REG(phy_base, 2), (0x3 << 6) | (0x3 << 2),
			((0x2 >> cs) << 6) | (0 << 2));
	clrsetbits_le32(PHY_REG(phy_base, 2), 0x3 << 6, 0 << 6);

	/* enable the other cs's output */
	if ((dramtype == DDR3 || dramtype == DDR4) && rank == 2)
		pctl_write_mr(dram->pctl, (cs + 1) & 1, 1, tmp & ~(1 << 12),
			      dramtype);

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	return 0;
}

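/*
 * Alternating 0xaa/0x55 test pattern; it is not referenced by the
 * functions above and is presumably consumed by the test/training code
 * later in the file.
 */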
char pattern[32] = {
	0xaa, 0x55, 0xaa, 0x55, 0x55, 0xaa, 0x55, 0xaa,
	0x55, 0xaa, 0x55, 0xaa, 0xaa, 0x55, 0xaa, 0x55,
	0x55, 0x55, 0xaa, 0xaa, 0xaa, 0xaa, 0x55, 0x55,
	0xaa, 0xaa, 0x55, 0x55, 0x55, 0x55, 0xaa, 0xaa
};

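/*
 * Hardware read training for one chip select. For DDR3 the RX vref is
 * temporarily nudged down, and the controller's refresh timing is
 * mirrored into the PHY, presumably so refreshes keep running while the
 * PHY owns the bus during training.
 */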
static int data_training_rd(struct dram_info *dram, u32 cs, u32 dramtype,
			    u32 mhz)
{
	void __iomem *pctl_base = dram->pctl;
	void __iomem *phy_base = dram->phy;
	u32 trefi_1x, trfc_1x;
	u32 dis_auto_zq = 0;
	u32 timeout_us = 1000;
	u32 dqs_default;
	u32 cur_fsp;
	u32 vref_inner;
	u32 i;
	struct sdram_head_info_index_v2 *index =
		(struct sdram_head_info_index_v2 *)common_info;
	struct dq_map_info *map_info;

	vref_inner = readl(PHY_REG(phy_base, 0x128)) & 0xff;
	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner - 0xa,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	map_info = (struct dq_map_info *)((void *)common_info +
					  index->dq_map_index.offset * 4);
	/* only one cs at a time; 0: cs0, 1: cs1 */
	if (cs > 1)
		return -1;

	dqs_default = 0xf;
	dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);

	cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
	/* config refresh timing */
	trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			   DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
	trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
			DDR_PCTL2_RFSHTMG) & 0x3ff;
	/* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
	clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
	clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
	/* reg_phy_trfc */
	clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
	/* reg_max_refi_cnt */
	clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);

	/* choose training cs */
	clrsetbits_le32(PHY_REG(phy_base, 0x71), 0x3 << 6, (0x2 >> cs) << 6);

	/* set dq map for ddr4 */
	if (dramtype == DDR4) {
		setbits_le32(PHY_REG(phy_base, 0x70), BIT(7));
		for (i = 0; i < 4; i++) {
			writel((map_info->ddr4_dq_map[cs * 2] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x238 + i));
			writel((map_info->ddr4_dq_map[cs * 2 + 1] >>
				((i % 4) * 8)) & 0xff,
			       PHY_REG(phy_base, 0x2b8 + i));
		}
	}

	/* cha_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x230), 0x3f, dqs_default);
	/* cha_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x234), 0x3f, dqs_default);
	/* chb_l reg_l_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b0), 0x3f, dqs_default);
	/* chb_h reg_h_rd_train_dqs_default[5:0] */
	clrsetbits_le32(PHY_REG(phy_base, 0x2b4), 0x3f, dqs_default);

	/* choose read-train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x1);
	/* enable read-train auto mode */
	clrsetbits_le32(PHY_REG(phy_base, 0x70), 0x3, 0x3);

	/* wait for training to complete */
	while (1) {
		if ((readl(PHY_REG(phy_base, 0x93)) >> 7) & 0x1)
			break;

		udelay(1);
		if (timeout_us-- == 0) {
			printascii("error: read training timeout\n");
			return -1;
		}
	}

	/* check the read-train result */
	if ((readl(PHY_REG(phy_base, 0x240)) & 0x3) ||
	    (readl(PHY_REG(phy_base, 0x2c0)) & 0x3)) {
		printascii("error: read training error\n");
		return -1;
	}

	/* exit read training */
	clrbits_le32(PHY_REG(phy_base, 0x70), BIT(1));

	pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);

	if (dramtype == DDR3 && vref_inner == 0x80) {
		for (i = 0; i < 4; i++)
			writel(vref_inner,
			       PHY_REG(phy_base, 0x118 + i * 0x10));

		/* reg_rx_vref_value_update */
		setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
		clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
	}

	return 0;
}

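/*
 * Hardware write training for one chip select. For LPDDR3 at or below
 * 400 MHz the PHY CL/CWL fields are temporarily forced down; for LPDDR4
 * the trained write vref is saved into fsp_param[] for later DFS use.
 */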
1763static int data_training_wr(struct dram_info *dram, u32 cs, u32 dramtype,
1764 u32 mhz, u32 dst_fsp)
1765{
1766 void __iomem *pctl_base = dram->pctl;
1767 void __iomem *phy_base = dram->phy;
1768 u32 trefi_1x, trfc_1x;
1769 u32 dis_auto_zq = 0;
1770 u32 timeout_us = 1000;
1771 u32 cur_fsp;
1772 u32 mr_tmp, cl, cwl, phy_fsp, offset = 0;
1773
1774 if (dramtype == LPDDR3 && mhz <= 400) {
1775 phy_fsp = (readl(PHY_REG(phy_base, 0xc)) >> 0x2) & 0x3;
1776 offset = (phy_fsp == 0) ? 0x5 : 0x387 + (phy_fsp - 1) * 3;
1777 cl = readl(PHY_REG(phy_base, offset));
1778 cwl = readl(PHY_REG(phy_base, offset + 2));
1779
1780 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, 0x8);
1781 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, 0x4);
1782 pctl_write_mr(dram->pctl, 3, 2, 0x6, dramtype);
1783 }
1784
1785 dis_auto_zq = pctl_dis_zqcs_aref(dram->pctl);
1786
1787 /* PHY_0x7b[7:0] reg_train_col_addr[7:0] */
1788 clrsetbits_le32(PHY_REG(phy_base, 0x7b), 0xff, 0x0);
1789 /* PHY_0x7c[4:2] reg_train_ba_addr[2:0] */
1790 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x7 << 2, 0x0 << 2);
1791 /* PHY_0x7c[1:0] reg_train_col_addr[9:8] */
1792 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3, 0x0);
1793 /* PHY_0x7d[7:0] reg_train_row_addr[7:0] */
1794 clrsetbits_le32(PHY_REG(phy_base, 0x7d), 0xff, 0x0);
1795 /* PHY_0x7e[7:0] reg_train_row_addr[15:8] */
1796 clrsetbits_le32(PHY_REG(phy_base, 0x7e), 0xff, 0x0);
1797
1798 /* PHY_0x71[3] wrtrain_check_data_value_random_gen */
1799 clrbits_le32(PHY_REG(phy_base, 0x71), BIT(3));
1800
1801 /* config refresh timing */
1802 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
1803 trefi_1x = ((readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1804 DDR_PCTL2_RFSHTMG) >> 16) & 0xfff) * 32;
1805 trfc_1x = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1806 DDR_PCTL2_RFSHTMG) & 0x3ff;
1807 /* reg_phy_trefi[7:0] and reg_phy_trefi[13:8] */
1808 clrsetbits_le32(PHY_REG(phy_base, 0x6e), 0xff, trefi_1x & 0xff);
1809 clrsetbits_le32(PHY_REG(phy_base, 0x6f), 0x3f, (trefi_1x >> 8) & 0x3f);
1810 /* reg_phy_trfc */
1811 clrsetbits_le32(PHY_REG(phy_base, 0x57), 0xff, trfc_1x);
1812 /* reg_max_refi_cnt */
1813 clrsetbits_le32(PHY_REG(phy_base, 0x61), 0xf << 4, 0x8 << 4);
1814
1815 /* choose training cs */
1816 clrsetbits_le32(PHY_REG(phy_base, 0x7c), 0x3 << 6, (0x2 >> cs) << 6);
1817
1818 /* PHY_0x7a [4] reg_wr_train_dqs_default_bypass */
1819 /* 0: Use the write-leveling value. */
1820 /* 1: use reg0x233 0x237 0x2b3 0x2b7 */
1821 setbits_le32(PHY_REG(phy_base, 0x7a), BIT(4));
1822
1823 /* PHY_0x7a [0] reg_dq_wr_train_auto */
1824 setbits_le32(PHY_REG(phy_base, 0x7a), 0x1);
1825
1826 /* PHY_0x7a [1] reg_dq_wr_train_en */
1827 setbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1828
1829 send_a_refresh(dram);
1830
1831 while (1) {
1832 if ((readl(PHY_REG(phy_base, 0x92)) >> 7) & 0x1)
1833 break;
1834
1835 udelay(1);
1836 if (timeout_us-- == 0) {
1837 printascii("error: write training timeout\n");
1838			return -1;
1840 }
1841 }
1842
1843	/* Check the write training status */
1844 if ((readl(PHY_REG(phy_base, 0x90)) >> 5) & 0x7) {
1845 printascii("error: write training error\n");
1846 return -1;
1847 }
1848
1849 /* PHY_0x7a [1] reg_dq_wr_train_en */
1850 clrbits_le32(PHY_REG(phy_base, 0x7a), BIT(1));
1851
1852 pctl_rest_zqcs_aref(dram->pctl, dis_auto_zq);
1853
1854 /* save LPDDR4 write vref to fsp_param for dfs */
1855 if (dramtype == LPDDR4) {
1856 fsp_param[dst_fsp].vref_dq[cs] =
1857 ((readl(PHY_REG(phy_base, 0x384)) & 0x3f) +
1858 (readl(PHY_REG(phy_base, 0x385)) & 0x3f)) / 2;
1859 /* add range info */
1860 fsp_param[dst_fsp].vref_dq[cs] |=
1861 ((readl(PHY_REG(phy_base, 0x7c)) & BIT(5)) << 1);
1862 }
1863
1864 if (dramtype == LPDDR3 && mhz <= 400) {
1865 clrsetbits_le32(PHY_REG(phy_base, offset), 0x1f, cl);
1866 clrsetbits_le32(PHY_REG(phy_base, offset + 2), 0x1f, cwl);
1867 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
1868 DDR_PCTL2_INIT3);
1869 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
1870 dramtype);
1871 }
1872
1873 return 0;
1874}
1875
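/*
 * Run the training steps selected in training_flag on one rank;
 * FULL_TRAINING expands to write leveling plus read gate, read and
 * write training. Returns 0 on success, non-zero on the first failure.
 */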
1876static int data_training(struct dram_info *dram, u32 cs,
1877 struct rv1126_sdram_params *sdram_params, u32 dst_fsp,
1878 u32 training_flag)
1879{
1880 u32 ret = 0;
1881
1882 if (training_flag == FULL_TRAINING)
1883 training_flag = READ_GATE_TRAINING | WRITE_LEVELING |
1884 WRITE_TRAINING | READ_TRAINING;
1885
1886 if ((training_flag & WRITE_LEVELING) == WRITE_LEVELING) {
1887 ret = data_training_wl(dram, cs,
1888 sdram_params->base.dramtype,
1889 sdram_params->ch.cap_info.rank);
1890 if (ret != 0)
1891 goto out;
1892 }
1893
1894 if ((training_flag & READ_GATE_TRAINING) == READ_GATE_TRAINING) {
1895 ret = data_training_rg(dram, cs,
1896 sdram_params->base.dramtype);
1897 if (ret != 0)
1898 goto out;
1899 }
1900
1901 if ((training_flag & READ_TRAINING) == READ_TRAINING) {
1902 ret = data_training_rd(dram, cs,
1903 sdram_params->base.dramtype,
1904 sdram_params->base.ddr_freq);
1905 if (ret != 0)
1906 goto out;
1907 }
1908
1909 if ((training_flag & WRITE_TRAINING) == WRITE_TRAINING) {
1910 ret = data_training_wr(dram, cs,
1911 sdram_params->base.dramtype,
1912 sdram_params->base.ddr_freq, dst_fsp);
1913 if (ret != 0)
1914 goto out;
1915 }
1916
1917out:
1918 return ret;
1919}
1920
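/*
 * Run write leveling on all ranks with a fixed CA/clock deskew and
 * store the per-byte results, relative to clk_skew, in wrlvl_result[].
 */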
1921static int get_wrlvl_val(struct dram_info *dram,
1922 struct rv1126_sdram_params *sdram_params)
1923{
1924 int i, j, clk_skew;
1925 void __iomem *phy_base = dram->phy;
1926 u32 lp_stat;
1927 int ret;
1928
1929 lp_stat = low_power_update(dram, 0);
1930
1931 clk_skew = 0x1f;
1932 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, clk_skew, 3,
1933 sdram_params->base.dramtype);
1934
1935 ret = data_training(dram, 0, sdram_params, 0, WRITE_LEVELING);
1936 if (sdram_params->ch.cap_info.rank == 2)
1937 ret |= data_training(dram, 1, sdram_params, 0, WRITE_LEVELING);
1938
1939 for (j = 0; j < 2; j++)
1940 for (i = 0; i < 4; i++)
1941 wrlvl_result[j][i] =
1942 (readl(PHY_REG(phy_base, wrlvl_result_offset[j][i])) & 0x3f) -
1943 clk_skew;
1944
1945 low_power_update(dram, lp_stat);
1946
1947 return ret;
1948}
1949
1950#if defined(CONFIG_CMD_DDR_TEST_TOOL)
1951static void init_rw_trn_result_struct(struct rw_trn_result *result,
1952 void __iomem *phy_base, u8 cs_num)
1953{
1954 int i;
1955
1956 result->cs_num = cs_num;
1957 result->byte_en = readb(PHY_REG(dram_info.phy, 0xf)) &
1958 PHY_DQ_WIDTH_MASK;
1959 for (i = 0; i < FSP_NUM; i++)
1960 result->fsp_mhz[i] = 0;
1961}
1962
1963static void save_rw_trn_min_max(void __iomem *phy_base,
1964 struct cs_rw_trn_result *rd_result,
1965 struct cs_rw_trn_result *wr_result,
1966 u8 byte_en)
1967{
1968 u16 phy_ofs;
1969 u8 dqs;
1970 u8 dq;
1971
1972 for (dqs = 0; dqs < BYTE_NUM; dqs++) {
1973 if ((byte_en & BIT(dqs)) == 0)
1974 continue;
1975
1976 /* Channel A or B (low or high 16 bit) */
1977 phy_ofs = dqs < 2 ? 0x230 : 0x2b0;
1978 /* low or high 8 bit */
1979 phy_ofs += (dqs & 0x1) == 0 ? 0 : 0x9;
1980 for (dq = 0; dq < 8; dq++) {
1981 rd_result->dqs[dqs].dq_min[dq] =
1982 readb(PHY_REG(phy_base, phy_ofs + 0x15 + dq));
1983 rd_result->dqs[dqs].dq_max[dq] =
1984 readb(PHY_REG(phy_base, phy_ofs + 0x27 + dq));
1985 wr_result->dqs[dqs].dq_min[dq] =
1986 readb(PHY_REG(phy_base, phy_ofs + 0x3d + dq));
1987 wr_result->dqs[dqs].dq_max[dq] =
1988 readb(PHY_REG(phy_base, phy_ofs + 0x4f + dq));
1989 }
1990 }
1991}
1992
1993static void save_rw_trn_deskew(void __iomem *phy_base,
1994 struct fsp_rw_trn_result *result, u8 cs_num,
1995 int min_val, bool rw)
1996{
1997 u16 phy_ofs;
1998 u8 cs;
1999 u8 dq;
2000
2001 result->min_val = min_val;
2002
2003 for (cs = 0; cs < cs_num; cs++) {
2004 phy_ofs = cs == 0 ? 0x170 : 0x1a0;
2005 phy_ofs += rw == SKEW_RX_SIGNAL ? 0x1 : 0x17;
2006 for (dq = 0; dq < 8; dq++) {
2007 result->cs[cs].dqs[0].dq_deskew[dq] =
2008 readb(PHY_REG(phy_base, phy_ofs + dq));
2009 result->cs[cs].dqs[1].dq_deskew[dq] =
2010 readb(PHY_REG(phy_base, phy_ofs + 0xb + dq));
2011 result->cs[cs].dqs[2].dq_deskew[dq] =
2012 readb(PHY_REG(phy_base, phy_ofs + 0x60 + dq));
2013 result->cs[cs].dqs[3].dq_deskew[dq] =
2014 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + dq));
2015 }
2016
2017 result->cs[cs].dqs[0].dqs_deskew =
2018 readb(PHY_REG(phy_base, phy_ofs + 0x8));
2019 result->cs[cs].dqs[1].dqs_deskew =
2020 readb(PHY_REG(phy_base, phy_ofs + 0xb + 0x8));
2021 result->cs[cs].dqs[2].dqs_deskew =
2022 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0x8));
2023 result->cs[cs].dqs[3].dqs_deskew =
2024 readb(PHY_REG(phy_base, phy_ofs + 0x60 + 0xb + 0x8));
2025 }
2026}
2027
2028static void save_rw_trn_result_to_ddr(struct rw_trn_result *result)
2029{
2030 result->flag = DDR_DQ_EYE_FLAG;
2031 memcpy((void *)(RW_TRN_RESULT_ADDR), result, sizeof(*result));
2032}
2033#endif
2034
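/*
 * Derive the clock/CA deskew from the write-leveling results, run read
 * gate/read/write training on each rank, then re-center the RX and TX
 * per-bit deskew around the measured minimum values.
 */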
2035static int high_freq_training(struct dram_info *dram,
2036 struct rv1126_sdram_params *sdram_params,
2037 u32 fsp)
2038{
2039 u32 i, j;
2040 void __iomem *phy_base = dram->phy;
2041 u32 dramtype = sdram_params->base.dramtype;
2042 int min_val;
2043 int dqs_skew, clk_skew, ca_skew;
2044 u8 byte_en;
2045 int ret;
2046
2047 byte_en = readl(PHY_REG(phy_base, 0xf)) & PHY_DQ_WIDTH_MASK;
2048 dqs_skew = 0;
2049 for (j = 0; j < sdram_params->ch.cap_info.rank; j++) {
2050 for (i = 0; i < ARRAY_SIZE(wrlvl_result[0]); i++) {
2051 if ((byte_en & BIT(i)) != 0)
2052 dqs_skew += wrlvl_result[j][i];
2053 }
2054 }
2055 dqs_skew = dqs_skew /
2056 (int)(sdram_params->ch.cap_info.rank * (1 << sdram_params->ch.cap_info.bw));
2057
2058 clk_skew = 0x20 - dqs_skew;
2059 dqs_skew = 0x20;
2060
2061 if (dramtype == LPDDR4) {
2062 min_val = 0xff;
2063 for (j = 0; j < sdram_params->ch.cap_info.rank; j++)
2064 for (i = 0; i < sdram_params->ch.cap_info.bw; i++)
2065 min_val = MIN(wrlvl_result[j][i], min_val);
2066
2067 if (min_val < 0) {
2068 clk_skew = -min_val;
2069 ca_skew = -min_val;
2070 } else {
2071 clk_skew = 0;
2072 ca_skew = 0;
2073 }
2074 } else if (dramtype == LPDDR3) {
2075 ca_skew = clk_skew - 4;
2076 } else {
2077 ca_skew = clk_skew;
2078 }
2079 modify_ca_deskew(dram, DESKEW_MDF_ABS_VAL, clk_skew, ca_skew, 3,
2080 dramtype);
2081
2082 writel(wrlvl_result[0][0] + clk_skew, PHY_REG(phy_base, 0x233));
2083 writel(wrlvl_result[0][1] + clk_skew, PHY_REG(phy_base, 0x237));
2084 writel(wrlvl_result[0][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2085 writel(wrlvl_result[0][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2086 ret = data_training(dram, 0, sdram_params, fsp, READ_GATE_TRAINING |
2087 READ_TRAINING | WRITE_TRAINING);
2088#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2089 rw_trn_result.fsp_mhz[fsp] = (u16)sdram_params->base.ddr_freq;
2090 save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[0],
2091 &rw_trn_result.wr_fsp[fsp].cs[0],
2092 rw_trn_result.byte_en);
2093#endif
2094 if (sdram_params->ch.cap_info.rank == 2) {
2095 writel(wrlvl_result[1][0] + clk_skew, PHY_REG(phy_base, 0x233));
2096 writel(wrlvl_result[1][1] + clk_skew, PHY_REG(phy_base, 0x237));
2097 writel(wrlvl_result[1][2] + clk_skew, PHY_REG(phy_base, 0x2b3));
2098 writel(wrlvl_result[1][3] + clk_skew, PHY_REG(phy_base, 0x2b7));
2099 ret |= data_training(dram, 1, sdram_params, fsp,
2100 READ_GATE_TRAINING | READ_TRAINING |
2101 WRITE_TRAINING);
2102#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2103 save_rw_trn_min_max(phy_base, &rw_trn_result.rd_fsp[fsp].cs[1],
2104 &rw_trn_result.wr_fsp[fsp].cs[1],
2105 rw_trn_result.byte_en);
2106#endif
2107 }
2108 if (ret)
2109 goto out;
2110
2111 record_dq_prebit(dram);
2112
2113 min_val = get_min_value(dram, SKEW_RX_SIGNAL,
2114 sdram_params->ch.cap_info.rank) * -1;
2115 modify_dq_deskew(dram, SKEW_RX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2116 min_val, min_val, sdram_params->ch.cap_info.rank);
2117#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2118 save_rw_trn_deskew(phy_base, &rw_trn_result.rd_fsp[fsp],
2119 rw_trn_result.cs_num, (u8)(min_val * (-1)),
2120 SKEW_RX_SIGNAL);
2121#endif
2122
2123 min_val = MIN(get_min_value(dram, SKEW_TX_SIGNAL,
2124 sdram_params->ch.cap_info.rank),
2125 get_min_value(dram, SKEW_CA_SIGNAL,
2126 sdram_params->ch.cap_info.rank)) * -1;
2127
2128 /* clk = 0, rx all skew -7, tx - min_value */
2129 modify_ca_deskew(dram, DESKEW_MDF_DIFF_VAL, min_val, min_val, 3,
2130 dramtype);
2131
2132 modify_dq_deskew(dram, SKEW_TX_SIGNAL, DESKEW_MDF_DIFF_VAL,
2133 min_val, min_val, sdram_params->ch.cap_info.rank);
2134#if defined(CONFIG_CMD_DDR_TEST_TOOL)
2135 save_rw_trn_deskew(phy_base, &rw_trn_result.wr_fsp[fsp],
2136 rw_trn_result.cs_num, (u8)(min_val * (-1)),
2137 SKEW_TX_SIGNAL);
2138#endif
2139
2140 ret = data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING);
2141 if (sdram_params->ch.cap_info.rank == 2)
2142 ret |= data_training(dram, 1, sdram_params, 0,
2143 READ_GATE_TRAINING);
2144out:
2145 return ret;
2146}
2147
2148static void set_ddrconfig(struct dram_info *dram, u32 ddrconfig)
2149{
2150 writel(ddrconfig, &dram->msch->deviceconf);
2151 clrsetbits_le32(&dram->grf->noc_con0, 0x3 << 0, 0 << 0);
2152}
2153
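/*
 * Derive the MSCH burstsize/burstpenalty settings from the bus width and
 * the controller burst length, then program the NOC timing registers.
 */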
2154static void update_noc_timing(struct dram_info *dram,
2155 struct rv1126_sdram_params *sdram_params)
2156{
2157 void __iomem *pctl_base = dram->pctl;
2158 u32 bw, bl;
2159
2160 bw = 8 << sdram_params->ch.cap_info.bw;
2161 bl = ((readl(pctl_base + DDR_PCTL2_MSTR) >> 16) & 0xf) * 2;
2162
2163 /* update the noc timing related to data bus width */
2164 if ((bw / 8 * bl) <= 16)
2165 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 0;
2166 else if ((bw / 8 * bl) == 32)
2167 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 1;
2168 else if ((bw / 8 * bl) == 64)
2169 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 2;
2170 else
2171 sdram_params->ch.noc_timings.ddrmode.b.burstsize = 3;
2172
2173 sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty =
2174 (bl * bw / 8) > 16 ? (bl / 4) : (16 / (bl * bw / 8)) * bl / 4;
2175
2176 if (sdram_params->base.dramtype == LPDDR4) {
2177 sdram_params->ch.noc_timings.ddrmode.b.mwrsize =
2178 (bw == 16) ? 0x1 : 0x2;
2179 sdram_params->ch.noc_timings.ddrtimingc0.b.wrtomwr =
2180 3 * sdram_params->ch.noc_timings.ddrtimingc0.b.burstpenalty;
2181 }
2182
2183 writel(sdram_params->ch.noc_timings.ddrtiminga0.d32,
2184 &dram->msch->ddrtiminga0);
2185 writel(sdram_params->ch.noc_timings.ddrtimingb0.d32,
2186 &dram->msch->ddrtimingb0);
2187 writel(sdram_params->ch.noc_timings.ddrtimingc0.d32,
2188 &dram->msch->ddrtimingc0);
2189 writel(sdram_params->ch.noc_timings.devtodev0.d32,
2190 &dram->msch->devtodev0);
2191 writel(sdram_params->ch.noc_timings.ddrmode.d32, &dram->msch->ddrmode);
2192 writel(sdram_params->ch.noc_timings.ddr4timing.d32,
2193 &dram->msch->ddr4timing);
2194}
2195
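/*
 * Configure the MSCH split logic for asymmetric configurations where the
 * high 16-bit half has fewer rows than the low half, so the extra
 * low-half capacity remains addressable.
 */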
2196static int split_setup(struct dram_info *dram,
2197 struct rv1126_sdram_params *sdram_params)
2198{
2199 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2200 u32 dramtype = sdram_params->base.dramtype;
2201 u32 split_size, split_mode;
2202 u64 cs_cap[2], cap;
2203
2204 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dramtype);
2205 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dramtype);
2206	/* only the case where the larger capacity is on the low 16 bits is supported */
2207 if (cap_info->cs0_high16bit_row < cap_info->cs0_row) {
2208 cap = cs_cap[0] / (1 << (cap_info->cs0_row -
2209 cap_info->cs0_high16bit_row));
2210 } else if ((cap_info->cs1_high16bit_row < cap_info->cs1_row) &&
2211 (cap_info->rank == 2)) {
2212 if (!cap_info->cs1_high16bit_row)
2213 cap = cs_cap[0];
2214 else
2215 cap = cs_cap[0] + cs_cap[1] / (1 << (cap_info->cs1_row -
2216 cap_info->cs1_high16bit_row));
2217 } else {
2218 goto out;
2219 }
2220 split_size = (u32)(cap >> 24) & SPLIT_SIZE_MASK;
2221 if (cap_info->bw == 2)
2222 split_mode = SPLIT_MODE_32_L16_VALID;
2223 else
2224 split_mode = SPLIT_MODE_16_L8_VALID;
2225
2226 rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2227 (SPLIT_MODE_MASK << SPLIT_MODE_OFFSET) |
2228 (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2229 (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2230 (split_mode << SPLIT_MODE_OFFSET) |
2231 (0x0 << SPLIT_BYPASS_OFFSET) |
2232 (split_size << SPLIT_SIZE_OFFSET));
2233
2234 rk_clrsetreg(BUS_SGRF_BASE_ADDR + SGRF_SOC_CON2,
2235 MSCH_AXI_BYPASS_ALL_MASK << MSCH_AXI_BYPASS_ALL_SHIFT,
2236 0x0 << MSCH_AXI_BYPASS_ALL_SHIFT);
2237
2238out:
2239 return 0;
2240}
2241
2242static void split_bypass(struct dram_info *dram)
2243{
2244 if ((readl(&dram->ddrgrf->grf_ddrsplit_con) &
2245 (1 << SPLIT_BYPASS_OFFSET)) != 0)
2246 return;
2247
2248 /* bypass split */
2249 rk_clrsetreg(&dram->ddrgrf->grf_ddrsplit_con,
2250 (SPLIT_BYPASS_MASK << SPLIT_BYPASS_OFFSET) |
2251 (SPLIT_SIZE_MASK << SPLIT_SIZE_OFFSET),
2252 (0x1 << SPLIT_BYPASS_OFFSET) |
2253 (0x0 << SPLIT_SIZE_OFFSET));
2254}
2255
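/*
 * Apply the detected geometry: program ddrconfig, encode the capacity
 * info into pmugrf os_reg[2]/os_reg[3], and set the MSCH device size
 * and NOC timings.
 */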
2256static void dram_all_config(struct dram_info *dram,
2257 struct rv1126_sdram_params *sdram_params)
2258{
2259 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2260 u32 dram_type = sdram_params->base.dramtype;
2261 void __iomem *pctl_base = dram->pctl;
2262 u32 sys_reg2 = 0;
2263 u32 sys_reg3 = 0;
2264 u64 cs_cap[2];
2265 u32 cs_pst;
2266
2267 set_ddrconfig(dram, cap_info->ddrconfig);
2268 sdram_org_config(cap_info, &sdram_params->base, &sys_reg2,
2269 &sys_reg3, 0);
2270 writel(sys_reg2, &dram->pmugrf->os_reg[2]);
2271 writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2272
2273 cs_cap[0] = sdram_get_cs_cap(cap_info, 0, dram_type);
2274 cs_cap[1] = sdram_get_cs_cap(cap_info, 1, dram_type);
2275
2276 if (cap_info->rank == 2) {
2277 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2278 6 + 2;
2279 if (cs_pst > 28)
2280 cs_cap[0] = 1llu << cs_pst;
2281 }
2282
2283 writel(((((cs_cap[1] >> 20) / 64) & 0xff) << 8) |
2284 (((cs_cap[0] >> 20) / 64) & 0xff),
2285 &dram->msch->devicesize);
2286 update_noc_timing(dram, sdram_params);
2287}
2288
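/*
 * Enable automatic power-down/self-refresh according to pd_idle/sr_idle
 * and set the per-DRAM-type low-power controls in the DDR GRF.
 */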
2289static void enable_low_power(struct dram_info *dram,
2290 struct rv1126_sdram_params *sdram_params)
2291{
2292 void __iomem *pctl_base = dram->pctl;
2293 u32 grf_lp_con;
2294
2295 writel(0x1f1f0617, &dram->ddrgrf->ddr_grf_con[1]);
2296
2297 if (sdram_params->base.dramtype == DDR4)
2298 grf_lp_con = (0x7 << 16) | (1 << 1);
2299 else if (sdram_params->base.dramtype == DDR3)
2300 grf_lp_con = (0x7 << 16) | (1 << 0);
2301 else
2302 grf_lp_con = (0x7 << 16) | (1 << 2);
2303
2304 /* en lpckdis_en */
2305 grf_lp_con = grf_lp_con | (0x1 << (9 + 16)) | (0x1 << 9);
2306 writel(grf_lp_con, &dram->ddrgrf->ddr_grf_lp_con);
2307
2308 /* enable sr, pd */
2309 if (dram->pd_idle == 0)
2310 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2311 else
2312 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 1));
2313 if (dram->sr_idle == 0)
2314 clrbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2315 else
2316 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, 1);
2317 setbits_le32(pctl_base + DDR_PCTL2_PWRCTL, (1 << 3));
2318}
2319
2320static void print_ddr_info(struct rv1126_sdram_params *sdram_params)
2321{
2322 u32 split;
2323
2324 if ((readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2325 (1 << SPLIT_BYPASS_OFFSET)) != 0)
2326 split = 0;
2327 else
2328 split = readl(DDR_GRF_BASE_ADDR + DDR_GRF_SPLIT_CON) &
2329 SPLIT_SIZE_MASK;
2330
2331 sdram_print_ddr_info(&sdram_params->ch.cap_info,
2332 &sdram_params->base, split);
2333}
2334
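/*
 * One full controller/PHY bring-up at the configured frequency: reset
 * sequencing, pctl and PHY setup, mode-register programming and read
 * gate training. post_init != 0 enables cs1 training and verbose errors.
 */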
2335static int sdram_init_(struct dram_info *dram,
2336 struct rv1126_sdram_params *sdram_params, u32 post_init)
2337{
2338 void __iomem *pctl_base = dram->pctl;
2339 void __iomem *phy_base = dram->phy;
2340 u32 ddr4_vref;
2341 u32 mr_tmp;
2342
2343 rkclk_configure_ddr(dram, sdram_params);
2344
2345 rkclk_ddr_reset(dram, 1, 1, 1, 1);
2346 udelay(10);
2347
2348 rkclk_ddr_reset(dram, 1, 1, 1, 0);
2349 phy_cfg(dram, sdram_params);
2350
2351 rkclk_ddr_reset(dram, 1, 1, 0, 0);
2352 phy_pll_set(dram, sdram_params->base.ddr_freq * MHZ, 1);
2353
2354 rkclk_ddr_reset(dram, 1, 0, 0, 0);
2355 pctl_cfg(dram->pctl, &sdram_params->pctl_regs,
2356 dram->sr_idle, dram->pd_idle);
2357
2358 if (sdram_params->ch.cap_info.bw == 2) {
2359		/* 32-bit interface uses pageclose */
2360 setbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2361		/* pageclose = 1 with pageclose_timer = 0 fails on LPDDR4 at 328MHz */
2362 clrsetbits_le32(pctl_base + DDR_PCTL2_SCHED1, 0xff, 0x1 << 0);
2363 } else {
2364 clrbits_le32(pctl_base + DDR_PCTL2_SCHED, 1 << 2);
2365 }
2366
2367#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2368 u32 tmp, trefi;
2369
2370 tmp = readl(pctl_base + DDR_PCTL2_RFSHTMG);
2371 trefi = (tmp >> 16) & 0xfff;
2372 writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2373 pctl_base + DDR_PCTL2_RFSHTMG);
2374#endif
2375
2376 /* set frequency_mode */
2377 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
2378 /* set target_frequency to Frequency 0 */
2379 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, 0);
2380
2381 set_ds_odt(dram, sdram_params, 0);
2382 sdram_params->ch.cap_info.ddrconfig = calculate_ddrconfig(sdram_params);
2383 set_ctl_address_map(dram, sdram_params);
2384
2385 setbits_le32(pctl_base + DDR_PCTL2_DFIMISC, (1 << 5) | (1 << 4));
2386
2387 rkclk_ddr_reset(dram, 0, 0, 0, 0);
2388
2389 while ((readl(pctl_base + DDR_PCTL2_STAT) & 0x7) == 0)
2390 continue;
2391
2392 if (sdram_params->base.dramtype == LPDDR3) {
2393 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, LPDDR3);
2394 } else if (sdram_params->base.dramtype == LPDDR4) {
2395 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT6);
2396 /* MR11 */
2397 pctl_write_mr(dram->pctl, 3, 11,
2398 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2399 LPDDR4);
2400 /* MR12 */
2401 pctl_write_mr(dram->pctl, 3, 12,
2402 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2403 LPDDR4);
2404
2405 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2406 /* MR22 */
2407 pctl_write_mr(dram->pctl, 3, 22,
2408 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2409 LPDDR4);
2410 }
2411
2412 if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) != 0) {
2413 if (post_init != 0)
2414 printascii("DTT cs0 error\n");
2415 return -1;
2416 }
2417
2418 if (sdram_params->base.dramtype == LPDDR4) {
2419 mr_tmp = read_mr(dram, 1, 14, LPDDR4);
2420
2421 if (mr_tmp != 0x4d)
2422 return -1;
2423 }
2424
2425 if (sdram_params->base.dramtype == LPDDR4) {
2426 mr_tmp = readl(pctl_base + DDR_PCTL2_INIT7);
2427 /* MR14 */
2428 pctl_write_mr(dram->pctl, 3, 14,
2429 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2430 LPDDR4);
2431 }
2432 if (post_init != 0 && sdram_params->ch.cap_info.rank == 2) {
2433 if (data_training(dram, 1, sdram_params, 0,
2434 READ_GATE_TRAINING) != 0) {
2435 printascii("DTT cs1 error\n");
2436 return -1;
2437 }
2438 }
2439
2440 if (sdram_params->base.dramtype == DDR4) {
2441 ddr4_vref = readl(PHY_REG(phy_base, 0x105)) * 39;
2442 pctl_write_vrefdq(dram->pctl, 0x3, ddr4_vref,
2443 sdram_params->base.dramtype);
2444 }
2445
2446 dram_all_config(dram, sdram_params);
2447 enable_low_power(dram, sdram_params);
2448
2449 return 0;
2450}
2451
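/*
 * Probe rank, bus width and row/col/bank geometry via training and
 * address-map walking; LPDDR4/X density is taken from MR8 instead.
 * Returns 0 on success, -1 if the capacity cannot be determined.
 */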
2452static u64 dram_detect_cap(struct dram_info *dram,
2453 struct rv1126_sdram_params *sdram_params,
2454 unsigned char channel)
2455{
2456 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2457 void __iomem *pctl_base = dram->pctl;
2458 void __iomem *phy_base = dram->phy;
2459 u32 mr8;
2460
2461 u32 bktmp;
2462 u32 coltmp;
2463 u32 rowtmp;
2464 u32 cs;
2465 u32 dram_type = sdram_params->base.dramtype;
2466 u32 pwrctl;
2467 u32 i, dq_map;
2468 u32 byte1 = 0, byte0 = 0;
2469 u32 tmp, byte;
2470 struct sdram_head_info_index_v2 *index = (struct sdram_head_info_index_v2 *)common_info;
2471 struct dq_map_info *map_info = (struct dq_map_info *)
2472 ((void *)common_info + index->dq_map_index.offset * 4);
2473
2474 cap_info->bw = dram_type == DDR3 ? 0 : 1;
2475 if (dram_type != LPDDR4) {
2476 if (dram_type != DDR4) {
2477 coltmp = 12;
2478 bktmp = 3;
2479 if (dram_type == LPDDR2)
2480 rowtmp = 15;
2481 else
2482 rowtmp = 16;
2483
2484 if (sdram_detect_col(cap_info, coltmp) != 0)
2485 goto cap_err;
2486
2487 sdram_detect_bank(cap_info, coltmp, bktmp);
2488 if (dram_type != LPDDR3)
2489 sdram_detect_dbw(cap_info, dram_type);
2490 } else {
2491 coltmp = 10;
2492 bktmp = 4;
2493 rowtmp = 17;
2494
2495 cap_info->col = 10;
2496 cap_info->bk = 2;
2497 sdram_detect_bg(cap_info, coltmp);
2498 }
2499
2500 if (sdram_detect_row(cap_info, coltmp, bktmp, rowtmp) != 0)
2501 goto cap_err;
2502
2503 sdram_detect_row_3_4(cap_info, coltmp, bktmp);
2504 } else {
2505 cap_info->col = 10;
2506 cap_info->bk = 3;
2507 mr8 = read_mr(dram, 1, 8, dram_type);
2508 cap_info->dbw = ((mr8 >> 6) & 0x3) == 0 ? 1 : 0;
2509 mr8 = (mr8 >> 2) & 0xf;
2510		if (mr8 <= 6) {	/* mr8 is unsigned, only the upper bound matters */
2511 cap_info->cs0_row = 14 + (mr8 + 1) / 2;
2512 } else if (mr8 == 0xc) {
2513 cap_info->cs0_row = 13;
2514 } else {
2515 printascii("Cap ERR: Fail to get cap of LPDDR4/X from MR8\n");
2516 goto cap_err;
2517 }
2518 if (cap_info->dbw == 0)
2519 cap_info->cs0_row++;
2520 cap_info->row_3_4 = mr8 % 2 == 1 ? 1 : 0;
2521 if (cap_info->cs0_row >= 17) {
2522 printascii("Cap ERR: ");
2523 printascii("RV1126 LPDDR4/X cannot support row >= 17\n");
2524 goto cap_err;
2525 // cap_info->cs0_row = 16;
2526 // cap_info->row_3_4 = 0;
2527 }
2528 }
2529
2530 pwrctl = readl(pctl_base + DDR_PCTL2_PWRCTL);
2531 writel(0, pctl_base + DDR_PCTL2_PWRCTL);
2532
2533 if (data_training(dram, 1, sdram_params, 0, READ_GATE_TRAINING) == 0)
2534 cs = 1;
2535 else
2536 cs = 0;
2537 cap_info->rank = cs + 1;
2538
2539 setbits_le32(PHY_REG(phy_base, 0xf), 0xf);
2540
2541 tmp = data_training_rg(dram, 0, dram_type) & 0xf;
2542
2543 if (tmp == 0) {
2544 cap_info->bw = 2;
2545 } else {
2546 if (dram_type == DDR3 || dram_type == DDR4) {
2547 dq_map = 0;
2548 byte = 0;
2549 for (i = 0; i < 4; i++) {
2550 if ((tmp & BIT(i)) == 0) {
2551 dq_map |= byte << (i * 2);
2552 byte++;
2553 }
2554 }
2555 cap_info->bw = byte / 2;
2556 for (i = 0; i < 4; i++) {
2557 if ((tmp & BIT(i)) != 0) {
2558 dq_map |= byte << (i * 2);
2559 byte++;
2560 }
2561 }
2562 clrsetbits_le32(&map_info->byte_map[0], 0xff << 24, dq_map << 24);
2563 } else {
2564 dq_map = readl(PHY_REG(phy_base, 0x4f));
2565 for (i = 0; i < 4; i++) {
2566 if (((dq_map >> (i * 2)) & 0x3) == 0)
2567 byte0 = i;
2568 if (((dq_map >> (i * 2)) & 0x3) == 1)
2569 byte1 = i;
2570 }
2571 clrsetbits_le32(PHY_REG(phy_base, 0xf), PHY_DQ_WIDTH_MASK,
2572 BIT(byte0) | BIT(byte1));
2573 if (data_training(dram, 0, sdram_params, 0, READ_GATE_TRAINING) == 0)
2574 cap_info->bw = 1;
2575 else
2576 cap_info->bw = 0;
2577 }
2578 }
2579 if (cap_info->bw > 0)
2580 cap_info->dbw = 1;
2581
2582 writel(pwrctl, pctl_base + DDR_PCTL2_PWRCTL);
2583
2584 cap_info->cs0_high16bit_row = cap_info->cs0_row;
2585 if (cs) {
2586 cap_info->cs1_row = cap_info->cs0_row;
2587 cap_info->cs1_high16bit_row = cap_info->cs0_row;
2588 } else {
2589 cap_info->cs1_row = 0;
2590 cap_info->cs1_high16bit_row = 0;
2591 }
2592
2593 if (dram_type == LPDDR3)
2594 sdram_detect_dbw(cap_info, dram_type);
2595
2596 return 0;
2597cap_err:
2598 return -1;
2599}
2600
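/*
 * Find the real cs1 row count: starting from cs0_row (clamped to the
 * available address space), write a test pattern just above the cs0
 * capacity and probe decreasing row counts until a row address bit
 * sticks without aliasing. Returns the row count, or 0 if rank != 2 or
 * no valid row count is found.
 */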
2601static int dram_detect_cs1_row(struct dram_info *dram,
2602 struct rv1126_sdram_params *sdram_params,
2603 unsigned char channel)
2604{
2605 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2606 void __iomem *pctl_base = dram->pctl;
2607 u32 ret = 0;
2608 void __iomem *test_addr;
2609 u32 row, bktmp, coltmp, bw;
2610 u64 cs0_cap;
2611 u32 byte_mask;
2612 u32 cs_pst;
2613 u32 cs_add = 0;
2614 u32 max_row;
2615
2616 if (cap_info->rank == 2) {
2617 cs_pst = (readl(pctl_base + DDR_PCTL2_ADDRMAP0) & 0x1f) +
2618 6 + 2;
2619 if (cs_pst < 28)
2620 cs_add = 1;
2621
2622		cs0_cap = 1llu << cs_pst;	/* avoid 32-bit shift overflow for cs_pst >= 31 */
2623
2624 if (sdram_params->base.dramtype == DDR4) {
2625 if (cap_info->dbw == 0)
2626 bktmp = cap_info->bk + 2;
2627 else
2628 bktmp = cap_info->bk + 1;
2629 } else {
2630 bktmp = cap_info->bk;
2631 }
2632 bw = cap_info->bw;
2633 coltmp = cap_info->col;
2634
2635 if (bw == 2)
2636 byte_mask = 0xFFFF;
2637 else
2638 byte_mask = 0xFF;
2639
2640 max_row = (cs_pst == 31) ? 30 : 31;
2641
2642 max_row = max_row - bktmp - coltmp - bw - cs_add + 1;
2643
2644 row = (cap_info->cs0_row > max_row) ? max_row :
2645 cap_info->cs0_row;
2646
2647 for (; row > 12; row--) {
2648 test_addr = (void __iomem *)(CFG_SYS_SDRAM_BASE +
2649 (u32)cs0_cap +
2650 (1ul << (row + bktmp + coltmp +
2651 cs_add + bw - 1ul)));
2652
2653 writel(0, CFG_SYS_SDRAM_BASE + (u32)cs0_cap);
2654 writel(PATTERN, test_addr);
2655
2656 if (((readl(test_addr) & byte_mask) ==
2657 (PATTERN & byte_mask)) &&
2658 ((readl(CFG_SYS_SDRAM_BASE + (u32)cs0_cap) &
2659 byte_mask) == 0)) {
2660 ret = row;
2661 break;
2662 }
2663 }
2664 }
2665
2666 return ret;
2667}
2668
2669/* return: 0 = success, other = fail */
2670static int sdram_init_detect(struct dram_info *dram,
2671 struct rv1126_sdram_params *sdram_params)
2672{
2673 struct sdram_cap_info *cap_info = &sdram_params->ch.cap_info;
2674 u32 ret;
2675 u32 sys_reg = 0;
2676 u32 sys_reg3 = 0;
2677 struct sdram_head_info_index_v2 *index =
2678 (struct sdram_head_info_index_v2 *)common_info;
2679 struct dq_map_info *map_info;
2680
2681 map_info = (struct dq_map_info *)((void *)common_info +
2682 index->dq_map_index.offset * 4);
2683
2684 if (sdram_init_(dram, sdram_params, 0)) {
2685 if (sdram_params->base.dramtype == DDR3) {
2686 clrsetbits_le32(&map_info->byte_map[0], 0xff << 24,
2687 ((0x1 << 6) | (0x3 << 4) | (0x2 << 2) |
2688 (0x0 << 0)) << 24);
2689 if (sdram_init_(dram, sdram_params, 0))
2690 return -1;
2691 } else {
2692 return -1;
2693 }
2694 }
2695
2696 if (sdram_params->base.dramtype == DDR3) {
2697 writel(PATTERN, CFG_SYS_SDRAM_BASE);
2698 if (readl(CFG_SYS_SDRAM_BASE) != PATTERN)
2699 return -1;
2700 }
2701
2702 split_bypass(dram);
2703 if (dram_detect_cap(dram, sdram_params, 0) != 0)
2704 return -1;
2705
2706 pctl_remodify_sdram_params(&sdram_params->pctl_regs, cap_info,
2707 sdram_params->base.dramtype);
2708 ret = sdram_init_(dram, sdram_params, 1);
2709 if (ret != 0)
2710 goto out;
2711
2712 cap_info->cs1_row =
2713 dram_detect_cs1_row(dram, sdram_params, 0);
2714 if (cap_info->cs1_row) {
2715 sys_reg = readl(&dram->pmugrf->os_reg[2]);
2716 sys_reg3 = readl(&dram->pmugrf->os_reg[3]);
2717 SYS_REG_ENC_CS1_ROW(cap_info->cs1_row,
2718 sys_reg, sys_reg3, 0);
2719 writel(sys_reg, &dram->pmugrf->os_reg[2]);
2720 writel(sys_reg3, &dram->pmugrf->os_reg[3]);
2721 }
2722
2723 sdram_detect_high_row(cap_info);
2724 split_setup(dram, sdram_params);
2725out:
2726 return ret;
2727}
2728
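/*
 * Pick the entry in sdram_configs[] with the highest frequency not above
 * freq_mhz; freq_mhz == 0 means use the f0 frequency from the loader
 * parameter blob.
 */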
2729struct rv1126_sdram_params *get_default_sdram_config(u32 freq_mhz)
2730{
2731 u32 i;
2732 u32 offset = 0;
2733 struct ddr2_3_4_lp2_3_info *ddr_info;
2734
2735 if (!freq_mhz) {
2736 ddr_info = get_ddr_drv_odt_info(sdram_configs[0].base.dramtype);
2737 if (ddr_info)
2738 freq_mhz =
2739 (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
2740 DDR_FREQ_MASK;
2741 else
2742 freq_mhz = 0;
2743 }
2744
2745 for (i = 0; i < ARRAY_SIZE(sdram_configs); i++) {
2746 if (sdram_configs[i].base.ddr_freq == 0 ||
2747 freq_mhz < sdram_configs[i].base.ddr_freq)
2748 break;
2749 }
2750 offset = i == 0 ? 0 : i - 1;
2751
2752 return &sdram_configs[offset];
2753}
2754
2755static const u16 pctl_need_update_reg[] = {
2756 DDR_PCTL2_RFSHTMG,
2757 DDR_PCTL2_INIT3,
2758 DDR_PCTL2_INIT4,
2759 DDR_PCTL2_INIT6,
2760 DDR_PCTL2_INIT7,
2761 DDR_PCTL2_DRAMTMG0,
2762 DDR_PCTL2_DRAMTMG1,
2763 DDR_PCTL2_DRAMTMG2,
2764 DDR_PCTL2_DRAMTMG3,
2765 DDR_PCTL2_DRAMTMG4,
2766 DDR_PCTL2_DRAMTMG5,
2767 DDR_PCTL2_DRAMTMG6,
2768 DDR_PCTL2_DRAMTMG7,
2769 DDR_PCTL2_DRAMTMG8,
2770 DDR_PCTL2_DRAMTMG9,
2771 DDR_PCTL2_DRAMTMG12,
2772 DDR_PCTL2_DRAMTMG13,
2773 DDR_PCTL2_DRAMTMG14,
2774 DDR_PCTL2_ZQCTL0,
2775 DDR_PCTL2_DFITMG0,
2776 DDR_PCTL2_ODTCFG
2777};
2778
2779static const u16 phy_need_update_reg[] = {
2780 0x14,
2781 0x18,
2782 0x1c
2783};
2784
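/*
 * Stage the frequency-dependent pctl and PHY timing registers for
 * dst_fsp and rewrite the LPDDR4 mode registers (mirroring them into
 * the PHY shadow registers) ahead of the actual frequency switch.
 */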
2785static void pre_set_rate(struct dram_info *dram,
2786 struct rv1126_sdram_params *sdram_params,
2787 u32 dst_fsp, u32 dst_fsp_lp4)
2788{
2789 u32 i, j, find;
2790 void __iomem *pctl_base = dram->pctl;
2791 void __iomem *phy_base = dram->phy;
2792 u32 phy_offset;
2793 u32 mr_tmp;
2794 u32 dramtype = sdram_params->base.dramtype;
2795
2796 sw_set_req(dram);
2797 /* pctl timing update */
2798 for (i = 0, find = 0; i < ARRAY_SIZE(pctl_need_update_reg); i++) {
2799 for (j = find; sdram_params->pctl_regs.pctl[j][0] != 0xFFFFFFFF;
2800 j++) {
2801 if (sdram_params->pctl_regs.pctl[j][0] ==
2802 pctl_need_update_reg[i]) {
2803 writel(sdram_params->pctl_regs.pctl[j][1],
2804 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2805 pctl_need_update_reg[i]);
2806 find = j;
2807 break;
2808 }
2809 }
2810 }
2811
2812#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
2813 u32 tmp, trefi;
2814
2815 tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2816 trefi = (tmp >> 16) & 0xfff;
2817 writel((tmp & 0xf000ffff) | (trefi / 2) << 16,
2818 pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_RFSHTMG);
2819#endif
2820
2821 sw_set_ack(dram);
2822
2823 /* phy timing update */
2824 if (dst_fsp == 0)
2825 phy_offset = 0;
2826 else
2827 phy_offset = PHY_REG(0, 0x387 - 5 + (dst_fsp - 1) * 3);
2828 /* cl cwl al update */
2829 for (i = 0, find = 0; i < ARRAY_SIZE(phy_need_update_reg); i++) {
2830 for (j = find; sdram_params->phy_regs.phy[j][0] != 0xFFFFFFFF;
2831 j++) {
2832 if (sdram_params->phy_regs.phy[j][0] ==
2833 phy_need_update_reg[i]) {
2834 writel(sdram_params->phy_regs.phy[j][1],
2835 phy_base + phy_offset +
2836 phy_need_update_reg[i]);
2837 find = j;
2838 break;
2839 }
2840 }
2841 }
2842
2843 set_ds_odt(dram, sdram_params, dst_fsp);
2844 if (dramtype == LPDDR4) {
2845 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2846 DDR_PCTL2_INIT4);
2847 /* MR13 */
2848 pctl_write_mr(dram->pctl, 3, 13,
2849 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2850 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2851 ((0x2 << 6) >> dst_fsp_lp4), dramtype);
2852 writel(((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
2853 PCTL2_MR_MASK) & (~(BIT(7) | BIT(6)))) |
2854 ((0x2 << 6) >> dst_fsp_lp4),
2855 PHY_REG(phy_base, 0x1b));
2856 /* MR3 */
2857 pctl_write_mr(dram->pctl, 3, 3,
2858 mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT &
2859 PCTL2_MR_MASK,
2860 dramtype);
2861 writel(mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT & PCTL2_MR_MASK,
2862 PHY_REG(phy_base, 0x19));
2863
2864 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2865 DDR_PCTL2_INIT3);
2866 /* MR1 */
2867 pctl_write_mr(dram->pctl, 3, 1,
2868 mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT &
2869 PCTL2_MR_MASK,
2870 dramtype);
2871 writel(mr_tmp >> PCTL2_LPDDR234_MR1_SHIFT & PCTL2_MR_MASK,
2872 PHY_REG(phy_base, 0x17));
2873 /* MR2 */
2874 pctl_write_mr(dram->pctl, 3, 2, mr_tmp & PCTL2_MR_MASK,
2875 dramtype);
2876 writel(mr_tmp & PCTL2_MR_MASK,
2877 PHY_REG(phy_base, 0x18));
2878
2879 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2880 DDR_PCTL2_INIT6);
2881 /* MR11 */
2882 pctl_write_mr(dram->pctl, 3, 11,
2883 mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2884 dramtype);
2885 writel(mr_tmp >> PCTL2_LPDDR4_MR11_SHIFT & PCTL2_MR_MASK,
2886 PHY_REG(phy_base, 0x1a));
2887 /* MR12 */
2888 pctl_write_mr(dram->pctl, 3, 12,
2889 mr_tmp >> PCTL2_LPDDR4_MR12_SHIFT & PCTL2_MR_MASK,
2890 dramtype);
2891
2892 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2893 DDR_PCTL2_INIT7);
2894 /* MR22 */
2895 pctl_write_mr(dram->pctl, 3, 22,
2896 mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2897 dramtype);
2898 writel(mr_tmp >> PCTL2_LPDDR4_MR22_SHIFT & PCTL2_MR_MASK,
2899 PHY_REG(phy_base, 0x1d));
2900 /* MR14 */
2901 pctl_write_mr(dram->pctl, 3, 14,
2902 mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2903 dramtype);
2904 writel(mr_tmp >> PCTL2_LPDDR4_MR14_SHIFT & PCTL2_MR_MASK,
2905 PHY_REG(phy_base, 0x1c));
2906 }
2907
2908 update_noc_timing(dram, sdram_params);
2909}
2910
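/*
 * Snapshot the drive strength, ODT and vref settings plus the NOC
 * timings for one frequency setpoint; the table is later copied to DDR
 * for DFS.
 */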
2911static void save_fsp_param(struct dram_info *dram, u32 dst_fsp,
2912 struct rv1126_sdram_params *sdram_params)
2913{
2914 void __iomem *pctl_base = dram->pctl;
2915 void __iomem *phy_base = dram->phy;
2916 struct rv1126_fsp_param *p_fsp_param = &fsp_param[dst_fsp];
2917 u32 temp, temp1;
2918 struct ddr2_3_4_lp2_3_info *ddr_info;
2919
2920 ddr_info = get_ddr_drv_odt_info(sdram_params->base.dramtype);
2921
2922 p_fsp_param->freq_mhz = sdram_params->base.ddr_freq;
2923
2924 if (sdram_params->base.dramtype == LPDDR4) {
2925 p_fsp_param->rd_odt_up_en = 0;
2926 p_fsp_param->rd_odt_down_en = 1;
2927 } else {
2928 p_fsp_param->rd_odt_up_en =
2929 ODT_INFO_PULLUP_EN(ddr_info->odt_info);
2930 p_fsp_param->rd_odt_down_en =
2931 ODT_INFO_PULLDOWN_EN(ddr_info->odt_info);
2932 }
2933
2934 if (p_fsp_param->rd_odt_up_en)
2935 p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x111));
2936 else if (p_fsp_param->rd_odt_down_en)
2937 p_fsp_param->rd_odt = readl(PHY_REG(phy_base, 0x110));
2938 else
2939 p_fsp_param->rd_odt = 0;
2940 p_fsp_param->wr_dq_drv = readl(PHY_REG(phy_base, 0x112));
2941 p_fsp_param->wr_ca_drv = readl(PHY_REG(phy_base, 0x100));
2942 p_fsp_param->wr_ckcs_drv = readl(PHY_REG(phy_base, 0x102));
2943 p_fsp_param->vref_inner = readl(PHY_REG(phy_base, 0x128));
2944 p_fsp_param->vref_out = readl(PHY_REG(phy_base, 0x105));
2945
2946 if (sdram_params->base.dramtype == DDR3) {
2947 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2948 DDR_PCTL2_INIT3);
2949 temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2950 p_fsp_param->ds_pdds = temp & DDR3_DS_MASK;
2951 p_fsp_param->dq_odt = temp & DDR3_RTT_NOM_MASK;
2952 p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2953 } else if (sdram_params->base.dramtype == DDR4) {
2954 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2955 DDR_PCTL2_INIT3);
2956 temp = (temp >> PCTL2_DDR34_MR1_SHIFT) & PCTL2_MR_MASK;
2957 p_fsp_param->ds_pdds = temp & DDR4_DS_MASK;
2958 p_fsp_param->dq_odt = temp & DDR4_RTT_NOM_MASK;
2959 p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2960 } else if (sdram_params->base.dramtype == LPDDR3) {
2961 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2962 DDR_PCTL2_INIT4);
2963 temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2964 p_fsp_param->ds_pdds = temp & 0xf;
2965
2966 p_fsp_param->dq_odt = lp3_odt_value;
2967 p_fsp_param->ca_odt = p_fsp_param->dq_odt;
2968 } else if (sdram_params->base.dramtype == LPDDR4) {
2969 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2970 DDR_PCTL2_INIT4);
2971 temp = (temp >> PCTL2_LPDDR234_MR3_SHIFT) & PCTL2_MR_MASK;
2972 p_fsp_param->ds_pdds = temp & LPDDR4_PDDS_MASK;
2973
2974 temp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
2975 DDR_PCTL2_INIT6);
2976 temp = (temp >> PCTL2_LPDDR4_MR11_SHIFT) & PCTL2_MR_MASK;
2977 p_fsp_param->dq_odt = temp & LPDDR4_DQODT_MASK;
2978 p_fsp_param->ca_odt = temp & LPDDR4_CAODT_MASK;
2979
2980 temp = MAX(readl(PHY_REG(phy_base, 0x3ae)),
2981 readl(PHY_REG(phy_base, 0x3ce)));
2982 temp1 = MIN(readl(PHY_REG(phy_base, 0x3be)),
2983 readl(PHY_REG(phy_base, 0x3de)));
2984 p_fsp_param->vref_ca[0] = (temp + temp1) / 2;
2985 temp = MAX(readl(PHY_REG(phy_base, 0x3af)),
2986 readl(PHY_REG(phy_base, 0x3cf)));
2987 temp1 = MIN(readl(PHY_REG(phy_base, 0x3bf)),
2988 readl(PHY_REG(phy_base, 0x3df)));
2989 p_fsp_param->vref_ca[1] = (temp + temp1) / 2;
2990 p_fsp_param->vref_ca[0] |=
2991 (readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
2992 p_fsp_param->vref_ca[1] |=
2993 (readl(PHY_REG(phy_base, 0x1e)) & BIT(6));
2994
2995 p_fsp_param->lp4_drv_pd_en = (readl(PHY_REG(phy_base, 0x114)) >>
2996 3) & 0x1;
2997 }
2998
2999 p_fsp_param->noc_timings.ddrtiminga0 =
3000 sdram_params->ch.noc_timings.ddrtiminga0;
3001 p_fsp_param->noc_timings.ddrtimingb0 =
3002 sdram_params->ch.noc_timings.ddrtimingb0;
3003 p_fsp_param->noc_timings.ddrtimingc0 =
3004 sdram_params->ch.noc_timings.ddrtimingc0;
3005 p_fsp_param->noc_timings.devtodev0 =
3006 sdram_params->ch.noc_timings.devtodev0;
3007 p_fsp_param->noc_timings.ddrmode =
3008 sdram_params->ch.noc_timings.ddrmode;
3009 p_fsp_param->noc_timings.ddr4timing =
3010 sdram_params->ch.noc_timings.ddr4timing;
3011 p_fsp_param->noc_timings.agingx0 =
3012 sdram_params->ch.noc_timings.agingx0;
3013 p_fsp_param->noc_timings.aging0 =
3014 sdram_params->ch.noc_timings.aging0;
3015 p_fsp_param->noc_timings.aging1 =
3016 sdram_params->ch.noc_timings.aging1;
3017 p_fsp_param->noc_timings.aging2 =
3018 sdram_params->ch.noc_timings.aging2;
3019 p_fsp_param->noc_timings.aging3 =
3020 sdram_params->ch.noc_timings.aging3;
3021
3022 p_fsp_param->flag = FSP_FLAG;
3023}
3024
3025static void copy_fsp_param_to_ddr(void)
3026{
3027 memcpy((void *)FSP_PARAM_STORE_ADDR, (void *)&fsp_param,
3028 sizeof(fsp_param));
3029}
3030
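/*
 * Recompute tRFC and the self-refresh exit times from the detected die
 * capacity and target frequency, then patch RFSHTMG, DRAMTMG8 and
 * DRAMTMG14 in the register table.
 */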
3031static void pctl_modify_trfc(struct ddr_pctl_regs *pctl_regs,
3032 struct sdram_cap_info *cap_info, u32 dram_type,
3033 u32 freq)
3034{
3035 u64 cs0_cap;
3036 u32 die_cap;
3037 u32 trfc_ns, trfc4_ns;
3038 u32 trfc, txsnr;
3039 u32 txs_abort_fast = 0;
3040 u32 tmp;
3041
3042 cs0_cap = sdram_get_cs_cap(cap_info, 0, dram_type);
3043 die_cap = (u32)(cs0_cap >> (20 + (cap_info->bw - cap_info->dbw)));
3044
3045 switch (dram_type) {
3046 case DDR3:
3047 if (die_cap <= DIE_CAP_512MBIT)
3048 trfc_ns = 90;
3049 else if (die_cap <= DIE_CAP_1GBIT)
3050 trfc_ns = 110;
3051 else if (die_cap <= DIE_CAP_2GBIT)
3052 trfc_ns = 160;
3053 else if (die_cap <= DIE_CAP_4GBIT)
3054 trfc_ns = 260;
3055 else
3056 trfc_ns = 350;
3057 txsnr = MAX(5, ((trfc_ns + 10) * freq + 999) / 1000);
3058 break;
3059
3060 case DDR4:
3061 if (die_cap <= DIE_CAP_2GBIT) {
3062 trfc_ns = 160;
3063 trfc4_ns = 90;
3064 } else if (die_cap <= DIE_CAP_4GBIT) {
3065 trfc_ns = 260;
3066 trfc4_ns = 110;
3067 } else if (die_cap <= DIE_CAP_8GBIT) {
3068 trfc_ns = 350;
3069 trfc4_ns = 160;
3070 } else {
3071 trfc_ns = 550;
3072 trfc4_ns = 260;
3073 }
3074 txsnr = ((trfc_ns + 10) * freq + 999) / 1000;
3075 txs_abort_fast = ((trfc4_ns + 10) * freq + 999) / 1000;
3076 break;
3077
3078 case LPDDR3:
3079 if (die_cap <= DIE_CAP_4GBIT)
3080 trfc_ns = 130;
3081 else
3082 trfc_ns = 210;
3083 txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3084 break;
3085
3086 case LPDDR4:
3087 if (die_cap <= DIE_CAP_2GBIT)
3088 trfc_ns = 130;
3089 else if (die_cap <= DIE_CAP_4GBIT)
3090 trfc_ns = 180;
3091 else if (die_cap <= DIE_CAP_8GBIT)
3092 trfc_ns = 280;
3093 else
3094 trfc_ns = 380;
3095 txsnr = MAX(2, ((trfc_ns + 10) * freq + 999) / 1000);
3096 break;
3097
3098 default:
3099 return;
3100 }
3101 trfc = (trfc_ns * freq + 999) / 1000;
3102
3103 for (int i = 0; pctl_regs->pctl[i][0] != 0xffffffff; i++) {
3104 switch (pctl_regs->pctl[i][0]) {
3105 case DDR_PCTL2_RFSHTMG:
3106 tmp = pctl_regs->pctl[i][1];
3107 /* t_rfc_min */
3108 tmp &= ~((u32)0x3ff);
3109 tmp |= ((trfc + 1) / 2) & 0x3ff;
3110 pctl_regs->pctl[i][1] = tmp;
3111 break;
3112
3113 case DDR_PCTL2_DRAMTMG8:
3114 if (dram_type == DDR3 || dram_type == DDR4) {
3115 tmp = pctl_regs->pctl[i][1];
3116 /* t_xs_x32 */
3117 tmp &= ~((u32)0x7f);
3118 tmp |= ((txsnr + 63) / 64) & 0x7f;
3119
3120 if (dram_type == DDR4) {
3121 /* t_xs_abort_x32 */
3122 tmp &= ~((u32)(0x7f << 16));
3123 tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 16;
3124 /* t_xs_fast_x32 */
3125 tmp &= ~((u32)(0x7f << 24));
3126 tmp |= (((txs_abort_fast + 63) / 64) & 0x7f) << 24;
3127 }
3128
3129 pctl_regs->pctl[i][1] = tmp;
3130 }
3131 break;
3132
3133 case DDR_PCTL2_DRAMTMG14:
3134 if (dram_type == LPDDR3 ||
3135 dram_type == LPDDR4) {
3136 tmp = pctl_regs->pctl[i][1];
3137 /* t_xsr */
3138 tmp &= ~((u32)0xfff);
3139 tmp |= ((txsnr + 1) / 2) & 0xfff;
3140 pctl_regs->pctl[i][1] = tmp;
3141 }
3142 break;
3143
3144 default:
3145 break;
3146 }
3147 }
3148}
3149
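/*
 * Switch the DRAM to a new frequency: quiesce the controller, re-lock
 * the DPLL and PHY PLL, select dst_fsp, rewrite the mode registers and
 * re-train at the new speed.
 */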
3150void ddr_set_rate(struct dram_info *dram,
3151 struct rv1126_sdram_params *sdram_params,
3152 u32 freq, u32 cur_freq, u32 dst_fsp,
3153 u32 dst_fsp_lp4, u32 training_en)
3154{
3155 u32 dest_dll_off, cur_init3, dst_init3, cur_fsp, cur_dll_off;
3156 u32 mr_tmp;
3157 u32 lp_stat;
3158 u32 dramtype = sdram_params->base.dramtype;
3159 struct rv1126_sdram_params *sdram_params_new;
3160 void __iomem *pctl_base = dram->pctl;
3161 void __iomem *phy_base = dram->phy;
3162
3163 lp_stat = low_power_update(dram, 0);
3164 sdram_params_new = get_default_sdram_config(freq);
3165 sdram_params_new->ch.cap_info.rank = sdram_params->ch.cap_info.rank;
3166 sdram_params_new->ch.cap_info.bw = sdram_params->ch.cap_info.bw;
3167
3168 pctl_modify_trfc(&sdram_params_new->pctl_regs,
3169 &sdram_params->ch.cap_info, dramtype, freq);
3170 pre_set_rate(dram, sdram_params_new, dst_fsp, dst_fsp_lp4);
3171
3172 while ((readl(pctl_base + DDR_PCTL2_STAT) &
3173 PCTL2_OPERATING_MODE_MASK) ==
3174 PCTL2_OPERATING_MODE_SR)
3175 continue;
3176
3177 dest_dll_off = 0;
3178 dst_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3179 DDR_PCTL2_INIT3);
3180 if ((dramtype == DDR3 && (dst_init3 & 1)) ||
3181 (dramtype == DDR4 && !(dst_init3 & 1)))
3182 dest_dll_off = 1;
3183
3184 cur_fsp = readl(pctl_base + DDR_PCTL2_MSTR2) & 0x3;
3185 cur_init3 = readl(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) +
3186 DDR_PCTL2_INIT3);
3187 cur_init3 &= PCTL2_MR_MASK;
3188 cur_dll_off = 1;
3189 if ((dramtype == DDR3 && !(cur_init3 & 1)) ||
3190 (dramtype == DDR4 && (cur_init3 & 1)))
3191 cur_dll_off = 0;
3192
3193 if (!cur_dll_off) {
3194 if (dramtype == DDR3)
3195 cur_init3 |= 1;
3196 else
3197 cur_init3 &= ~1;
3198 pctl_write_mr(dram->pctl, 2, 1, cur_init3, dramtype);
3199 }
3200
3201 setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3202 PCTL2_DIS_AUTO_REFRESH);
3203 update_refresh_reg(dram);
3204
3205 enter_sr(dram, 1);
3206
3207 writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3208 PMUGRF_CON_DDRPHY_BUFFEREN_EN,
3209 &dram->pmugrf->soc_con[0]);
3210 sw_set_req(dram);
3211 clrbits_le32(pctl_base + DDR_PCTL2_DFIMISC,
3212 PCTL2_DFI_INIT_COMPLETE_EN);
3213 sw_set_ack(dram);
3214
3215 sw_set_req(dram);
3216 if ((dramtype == DDR3 || dramtype == DDR4) && dest_dll_off)
3217 setbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3218 else
3219 clrbits_le32(pctl_base + DDR_PCTL2_MSTR, PCTL2_DLL_OFF_MODE);
3220
3221 setbits_le32(pctl_base + UMCTL2_REGS_FREQ(cur_fsp) + DDR_PCTL2_ZQCTL0,
3222 PCTL2_DIS_SRX_ZQCL);
3223 setbits_le32(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_ZQCTL0,
3224 PCTL2_DIS_SRX_ZQCL);
3225 sw_set_ack(dram);
3226
3227 writel(DDR_MSCH_EN_MASK | (0x1 << DDR_MSCH_EN_SHIFT),
3228 &dram->cru->clkgate_con[21]);
3229 writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3230 (0x1 << CLK_DDR_UPCTL_EN_SHIFT) |
3231 (0x1 << ACLK_DDR_UPCTL_EN_SHIFT),
3232 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3233
3234 clrbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3235 rkclk_set_dpll(dram, freq * MHz / 2);
3236 phy_pll_set(dram, freq * MHz, 0);
3237 phy_pll_set(dram, freq * MHz, 1);
3238 setbits_le32(PHY_REG(phy_base, 0), ANALOG_DERESET | DIGITAL_DERESET);
3239
3240 writel(PMUGRF_CON_DDRPHY_BUFFEREN_MASK |
3241 PMUGRF_CON_DDRPHY_BUFFEREN_DIS,
3242 &dram->pmugrf->soc_con[0]);
3243 writel(DDR_MSCH_EN_MASK | (0x0 << DDR_MSCH_EN_SHIFT),
3244 &dram->cru->clkgate_con[21]);
3245 writel(CLK_DDR_UPCTL_EN_MASK | ACLK_DDR_UPCTL_EN_MASK |
3246 (0x0 << CLK_DDR_UPCTL_EN_SHIFT) |
3247 (0x0 << ACLK_DDR_UPCTL_EN_SHIFT),
3248 BUS_SGRF_BASE_ADDR + SGRF_SOC_CON12);
3249 while ((readl(pctl_base + DDR_PCTL2_DFISTAT) &
3250 PCTL2_DFI_INIT_COMPLETE) != PCTL2_DFI_INIT_COMPLETE)
3251 continue;
3252
3253 sw_set_req(dram);
3254 setbits_le32(pctl_base + DDR_PCTL2_MSTR, 0x1 << 29);
3255 clrsetbits_le32(pctl_base + DDR_PCTL2_MSTR2, 0x3, dst_fsp);
3256 sw_set_ack(dram);
3257 update_refresh_reg(dram);
3258 clrsetbits_le32(PHY_REG(phy_base, 0xc), 0x3 << 2, dst_fsp << 2);
3259
3260 enter_sr(dram, 0);
3261
3262 setbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3263 clrbits_le32(PHY_REG(phy_base, 0x71), 1 << 5);
3264
3265 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) + DDR_PCTL2_INIT4);
3266 if (dramtype == LPDDR3) {
3267 pctl_write_mr(dram->pctl, 3, 1,
3268 (dst_init3 >> PCTL2_LPDDR234_MR1_SHIFT) &
3269 PCTL2_MR_MASK,
3270 dramtype);
3271 pctl_write_mr(dram->pctl, 3, 2, dst_init3 & PCTL2_MR_MASK,
3272 dramtype);
3273 pctl_write_mr(dram->pctl, 3, 3,
3274 (mr_tmp >> PCTL2_LPDDR234_MR3_SHIFT) &
3275 PCTL2_MR_MASK,
3276 dramtype);
3277 pctl_write_mr(dram->pctl, 3, 11, lp3_odt_value, dramtype);
3278 } else if ((dramtype == DDR3) || (dramtype == DDR4)) {
3279 pctl_write_mr(dram->pctl, 3, 1, dst_init3 & PCTL2_MR_MASK,
3280 dramtype);
3281 if (!dest_dll_off) {
3282 pctl_write_mr(dram->pctl, 3, 0,
3283 ((dst_init3 >> PCTL2_DDR34_MR0_SHIFT) &
3284 PCTL2_MR_MASK) | DDR3_DLL_RESET,
3285 dramtype);
3286 udelay(2);
3287 }
3288 pctl_write_mr(dram->pctl, 3, 0,
3289 (dst_init3 >> PCTL2_DDR34_MR0_SHIFT &
3290 PCTL2_MR_MASK) & (~DDR3_DLL_RESET),
3291 dramtype);
3292 pctl_write_mr(dram->pctl, 3, 2,
3293 ((mr_tmp >> PCTL2_DDR34_MR2_SHIFT) &
3294 PCTL2_MR_MASK), dramtype);
3295 if (dramtype == DDR4) {
3296 pctl_write_mr(dram->pctl, 3, 3, mr_tmp & PCTL2_MR_MASK,
3297 dramtype);
3298 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3299 DDR_PCTL2_INIT6);
3300 pctl_write_mr(dram->pctl, 3, 4,
3301 (mr_tmp >> PCTL2_DDR4_MR4_SHIFT) &
3302 PCTL2_MR_MASK,
3303 dramtype);
3304 pctl_write_mr(dram->pctl, 3, 5,
3305 mr_tmp >> PCTL2_DDR4_MR5_SHIFT &
3306 PCTL2_MR_MASK,
3307 dramtype);
3308
3309 mr_tmp = readl(pctl_base + UMCTL2_REGS_FREQ(dst_fsp) +
3310 DDR_PCTL2_INIT7);
3311 pctl_write_mr(dram->pctl, 3, 6,
3312 mr_tmp >> PCTL2_DDR4_MR6_SHIFT &
3313 PCTL2_MR_MASK,
3314 dramtype);
3315 }
3316 } else if (dramtype == LPDDR4) {
3317 pctl_write_mr(dram->pctl, 3, 13,
3318 ((mr_tmp >> PCTL2_LPDDR4_MR13_SHIFT &
3319 PCTL2_MR_MASK) & (~(BIT(7)))) |
3320 dst_fsp_lp4 << 7, dramtype);
3321 }
3322 clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3,
3323 PCTL2_DIS_AUTO_REFRESH);
3324 update_refresh_reg(dram);
3325
3326 /* training */
3327 high_freq_training(dram, sdram_params_new, dst_fsp);
3328 low_power_update(dram, lp_stat);
3329
3330 save_fsp_param(dram, dst_fsp, sdram_params_new);
3331}
3332
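/*
 * Train and save parameters for every frequency setpoint: f1, f2 and f3
 * in turn, ending on the final runtime frequency f0.
 */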
3333static void ddr_set_rate_for_fsp(struct dram_info *dram,
3334 struct rv1126_sdram_params *sdram_params)
3335{
3336 struct ddr2_3_4_lp2_3_info *ddr_info;
3337 u32 f0;
3338 u32 dramtype = sdram_params->base.dramtype;
3339 u32 f1, f2, f3;
3340
3341 ddr_info = get_ddr_drv_odt_info(dramtype);
3342 if (!ddr_info)
3343 return;
3344
3345 f0 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F0_SHIFT) &
3346 DDR_FREQ_MASK;
3347
3348 memset((void *)FSP_PARAM_STORE_ADDR, 0, sizeof(fsp_param));
3349 memset((void *)&fsp_param, 0, sizeof(fsp_param));
3350
3351 f1 = (ddr_info->ddr_freq0_1 >> DDR_FREQ_F1_SHIFT) &
3352 DDR_FREQ_MASK;
3353 f2 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F2_SHIFT) &
3354 DDR_FREQ_MASK;
3355 f3 = (ddr_info->ddr_freq2_3 >> DDR_FREQ_F3_SHIFT) &
3356 DDR_FREQ_MASK;
3357
3358 if (get_wrlvl_val(dram, sdram_params))
3359 printascii("get wrlvl value fail\n");
3360
3361	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3362 printascii("change to: ");
3363 printdec(f1);
3364 printascii("MHz\n");
3365 }
3366	ddr_set_rate(&dram_info, sdram_params, f1,
3367 sdram_params->base.ddr_freq, 1, 1, 1);
3368
3369 if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3370 printascii("change to: ");
3371 printdec(f2);
3372 printascii("MHz\n");
3373 }
3374	ddr_set_rate(&dram_info, sdram_params, f2, f1, 2, 0, 1);
Jagan Teki43241e02022-12-14 23:20:54 +05303375
3376 if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3377 printascii("change to: ");
3378 printdec(f3);
3379 printascii("MHz\n");
3380 }
3381	ddr_set_rate(&dram_info, sdram_params, f3, f2, 3, 1, 1);
Jagan Teki43241e02022-12-14 23:20:54 +05303382
3383 if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG)) {
3384 printascii("change to: ");
3385 printdec(f0);
3386 printascii("MHz(final freq)\n");
3387 }
3388	ddr_set_rate(&dram_info, sdram_params, f0, f3, 0, 0, 1);
3389}
3390
3391int get_uart_config(void)
3392{
3393 struct sdram_head_info_index_v2 *index =
3394 (struct sdram_head_info_index_v2 *)common_info;
3395 struct global_info *gbl_info;
3396
3397 gbl_info = (struct global_info *)((void *)common_info +
3398 index->global_index.offset * 4);
3399
3400 return gbl_info->uart_info;
3401}
3402
3403/* return: 0 = success, other = fail */
3404static int rv1126_dmc_init(struct udevice *dev)
3405{
3406 struct rv1126_sdram_params *sdram_params;
3407 int ret = 0;
3408 struct sdram_head_info_index_v2 *index =
3409 (struct sdram_head_info_index_v2 *)common_info;
3410 struct global_info *gbl_info;
3411
3412 dram_info.phy = (void *)DDR_PHY_BASE_ADDR;
3413 dram_info.pctl = (void *)UPCTL2_BASE_ADDR;
3414 dram_info.grf = (void *)GRF_BASE_ADDR;
3415 dram_info.cru = (void *)CRU_BASE_ADDR;
3416 dram_info.msch = (void *)SERVER_MSCH_BASE_ADDR;
3417 dram_info.ddrgrf = (void *)DDR_GRF_BASE_ADDR;
3418 dram_info.pmugrf = (void *)PMU_GRF_BASE_ADDR;
3419
3420#ifdef CONFIG_ROCKCHIP_DRAM_EXTENDED_TEMP_SUPPORT
3421 printascii("extended temp support\n");
3422#endif
3423 if (index->version_info != 2 ||
3424 (index->global_index.size != sizeof(struct global_info) / 4) ||
3425 (index->ddr3_index.size !=
3426 sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3427 (index->ddr4_index.size !=
3428 sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3429 (index->lp3_index.size !=
3430 sizeof(struct ddr2_3_4_lp2_3_info) / 4) ||
3431 (index->lp4_index.size != (sizeof(struct lp4_info) / 4)) ||
3432 (index->lp4x_index.size != (sizeof(struct lp4_info) / 4)) ||
3433 index->global_index.offset == 0 ||
3434 index->ddr3_index.offset == 0 ||
3435 index->ddr4_index.offset == 0 ||
3436 index->lp3_index.offset == 0 ||
3437 index->lp4_index.offset == 0 ||
3438 index->lp4x_index.offset == 0) {
3439 printascii("common info error\n");
3440 goto error;
3441 }
3442
3443 gbl_info = (struct global_info *)((void *)common_info +
3444 index->global_index.offset * 4);
3445
3446 dram_info.sr_idle = SR_INFO(gbl_info->sr_pd_info);
3447 dram_info.pd_idle = PD_INFO(gbl_info->sr_pd_info);
3448
3449 sdram_params = &sdram_configs[0];
3450 if (sdram_params->base.dramtype == DDR3 ||
3451 sdram_params->base.dramtype == DDR4) {
3452 if (DDR_2T_INFO(gbl_info->info_2t))
3453 sdram_params->pctl_regs.pctl[0][1] |= 0x1 << 10;
3454 else
3455 sdram_params->pctl_regs.pctl[0][1] &=
3456 ~(0x1 << 10);
3457 }
3458 ret = sdram_init_detect(&dram_info, sdram_params);
3459 if (ret) {
3460 sdram_print_dram_type(sdram_params->base.dramtype);
3461 printascii(", ");
3462 printdec(sdram_params->base.ddr_freq);
3463 printascii("MHz\n");
3464 goto error;
3465 }
3466 print_ddr_info(sdram_params);
3467#if defined(CONFIG_CMD_DDR_TEST_TOOL)
3468 init_rw_trn_result_struct(&rw_trn_result, dram_info.phy,
3469 (u8)sdram_params->ch.cap_info.rank);
3470#endif
3471
3472 ddr_set_rate_for_fsp(&dram_info, sdram_params);
3473 copy_fsp_param_to_ddr();
3474
3475#if defined(CONFIG_CMD_DDR_TEST_TOOL)
3476 save_rw_trn_result_to_ddr(&rw_trn_result);
3477#endif
3478
3479	if (IS_ENABLED(CONFIG_RAM_ROCKCHIP_DEBUG))
3480 printascii("out\n");
3481
3482 return ret;
3483error:
3484 printascii("error\n");
3485 return (-1);
3486}
3487
3488#endif
3489
3490static int rv1126_dmc_probe(struct udevice *dev)
3491{
3492#if defined(CONFIG_TPL_BUILD) || \
3493 (!defined(CONFIG_TPL) && defined(CONFIG_SPL_BUILD))
3494 if (rv1126_dmc_init(dev))
3495 return 0;
3496#else
3497 struct dram_info *priv = dev_get_priv(dev);
3498
3499 priv->pmugrf = syscon_get_first_range(ROCKCHIP_SYSCON_PMUGRF);
3500 debug("%s: grf=%p\n", __func__, priv->pmugrf);
3501 priv->info.base = CFG_SYS_SDRAM_BASE;
3502 priv->info.size =
3503 rockchip_sdram_size((phys_addr_t)&priv->pmugrf->os_reg[2]);
3504#endif
3505 return 0;
3506}
3507
3508static int rv1126_dmc_get_info(struct udevice *dev, struct ram_info *info)
3509{
3510 struct dram_info *priv = dev_get_priv(dev);
3511
3512 *info = priv->info;
3513
3514 return 0;
3515}
3516
3517static struct ram_ops rv1126_dmc_ops = {
3518 .get_info = rv1126_dmc_get_info,
3519};
3520
3521static const struct udevice_id rv1126_dmc_ids[] = {
3522 { .compatible = "rockchip,rv1126-dmc" },
3523 { }
3524};
3525
3526U_BOOT_DRIVER(dmc_rv1126) = {
3527 .name = "rockchip_rv1126_dmc",
3528 .id = UCLASS_RAM,
3529 .of_match = rv1126_dmc_ids,
3530 .ops = &rv1126_dmc_ops,
3531 .probe = rv1126_dmc_probe,
3532 .priv_auto = sizeof(struct dram_info),
3533};