// SPDX-License-Identifier: GPL-2.0+
/*
 * sunxi DRAM controller initialization
 * (C) Copyright 2012 Henrik Nordstrom <henrik@henriknordstrom.net>
 * (C) Copyright 2013 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
 *
 * Based on sun4i Linux kernel sources mach-sunxi/pm/standby/dram*.c
 * and earlier U-Boot Allwinner A10 SPL work
 *
 * (C) Copyright 2007-2012
 * Allwinner Technology Co., Ltd. <www.allwinnertech.com>
 * Berg Xing <bergxing@allwinnertech.com>
 * Tom Cubie <tangliang@allwinnertech.com>
 */

/*
 * Unfortunately the only documentation we have on the sun7i DRAM
 * controller is Allwinner boot0 + boot1 code, and that code uses
 * magic numbers & shifts with no explanations. Hence this code is
 * rather undocumented and full of magic.
 */

#include <init.h>
#include <asm/io.h>
#include <asm/arch/clock.h>
#include <asm/arch/dram.h>
#include <asm/arch/timer.h>
#include <asm/arch/sys_proto.h>
#include <linux/delay.h>

#define CPU_CFG_CHIP_VER(n)	((n) << 6)
#define CPU_CFG_CHIP_VER_MASK	CPU_CFG_CHIP_VER(0x3)
#define CPU_CFG_CHIP_REV_A	0x0
#define CPU_CFG_CHIP_REV_C1	0x1
#define CPU_CFG_CHIP_REV_C2	0x2
#define CPU_CFG_CHIP_REV_B	0x3

/*
 * Wait up to 1s for mask to be clear in given reg.
 */
static inline void await_bits_clear(u32 *reg, u32 mask)
{
	mctl_await_completion(reg, mask, 0);
}

/*
 * Wait up to 1s for mask to be set in given reg.
 */
static inline void await_bits_set(u32 *reg, u32 mask)
{
	mctl_await_completion(reg, mask, mask);
}

/*
 * This performs the external DRAM reset by driving the RESET pin low and
 * then high again. According to the DDR3 spec, the RESET pin needs to be
 * kept low for at least 200 us.
 */
static void mctl_ddr3_reset(void)
{
	struct sunxi_dram_reg *dram =
			(struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

#ifdef CONFIG_MACH_SUN4I
	struct sunxi_timer_reg *timer =
			(struct sunxi_timer_reg *)SUNXI_TIMER_BASE;
	u32 reg_val;

	writel(0, &timer->cpu_cfg);
	reg_val = readl(&timer->cpu_cfg);

	if ((reg_val & CPU_CFG_CHIP_VER_MASK) !=
	    CPU_CFG_CHIP_VER(CPU_CFG_CHIP_REV_A)) {
		setbits_le32(&dram->mcr, DRAM_MCR_RESET);
		udelay(200);
		clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
	} else
#endif
	{
		clrbits_le32(&dram->mcr, DRAM_MCR_RESET);
		udelay(200);
		setbits_le32(&dram->mcr, DRAM_MCR_RESET);
	}
	/* After the RESET pin is de-asserted, the DDR3 spec requires waiting
	 * for an additional 500 us before driving the CKE pin (Clock Enable)
	 * high. The duration of this delay can be configured in the SDR_IDCR
	 * (Initialization Delay Configuration Register) and applied
	 * automatically by the DRAM controller during the DDR3 initialization
	 * step. But SDR_IDCR has limited range on sun4i/sun5i hardware and
	 * can't provide sufficient delay at DRAM clock frequencies higher than
	 * 524 MHz (while Allwinner A13 supports DRAM clock frequency up to
	 * 533 MHz according to the datasheet). Additionally, there is no
	 * official documentation for the SDR_IDCR register anywhere, and
	 * there is always a chance that we are interpreting it wrong.
	 * Better be safe than sorry, so add an explicit delay here. */
	udelay(500);
}

static void mctl_set_drive(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

#ifdef CONFIG_MACH_SUN7I
	clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3) | (0x3 << 28),
#else
	clrsetbits_le32(&dram->mcr, DRAM_MCR_MODE_NORM(0x3),
#endif
			DRAM_MCR_MODE_EN(0x3) |
			0xffc);
}

static void mctl_itm_disable(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	clrsetbits_le32(&dram->ccr, DRAM_CCR_INIT, DRAM_CCR_ITM_OFF);
}

static void mctl_itm_enable(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	clrbits_le32(&dram->ccr, DRAM_CCR_ITM_OFF);
}

static void mctl_itm_reset(void)
{
	mctl_itm_disable();
	udelay(1); /* ITM reset needs a bit of delay */
	mctl_itm_enable();
	udelay(1);
}

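/*
 * Explanatory note (inferred from how mctl_enable_dll0() and
 * mctl_enable_dllx() below consume their argument, not from any official
 * documentation): para->tpr3 appears to pack the DLL phase settings, with
 * bits [21:16] holding a 6-bit phase for the global DLL0 and the low 16 bits
 * holding one 4-bit phase nibble per byte lane (lane 1 in bits [3:0],
 * lane 2 in bits [7:4], and so on).
 */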
static void mctl_enable_dll0(u32 phase)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	clrsetbits_le32(&dram->dllcr[0], 0x3f << 6,
			((phase >> 16) & 0x3f) << 6);
	clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET, DRAM_DLLCR_DISABLE);
	udelay(2);

	clrbits_le32(&dram->dllcr[0], DRAM_DLLCR_NRESET | DRAM_DLLCR_DISABLE);
	udelay(22);

	clrsetbits_le32(&dram->dllcr[0], DRAM_DLLCR_DISABLE, DRAM_DLLCR_NRESET);
	udelay(22);
}

/* Get the number of DDR byte lanes */
static u32 mctl_get_number_of_lanes(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	if ((readl(&dram->dcr) & DRAM_DCR_BUS_WIDTH_MASK) ==
	    DRAM_DCR_BUS_WIDTH(DRAM_DCR_BUS_WIDTH_32BIT))
		return 4;
	else
		return 2;
}

/*
 * Note: This differs from pm/standby in that it checks the bus width
 */
static void mctl_enable_dllx(u32 phase)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 i, number_of_lanes;

	number_of_lanes = mctl_get_number_of_lanes();

	for (i = 1; i <= number_of_lanes; i++) {
		clrsetbits_le32(&dram->dllcr[i], 0xf << 14,
				(phase & 0xf) << 14);
		clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET,
				DRAM_DLLCR_DISABLE);
		phase >>= 4;
	}
	udelay(2);

	for (i = 1; i <= number_of_lanes; i++)
		clrbits_le32(&dram->dllcr[i], DRAM_DLLCR_NRESET |
			     DRAM_DLLCR_DISABLE);
	udelay(22);

	for (i = 1; i <= number_of_lanes; i++)
		clrsetbits_le32(&dram->dllcr[i], DRAM_DLLCR_DISABLE,
				DRAM_DLLCR_NRESET);
	udelay(22);
}

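/*
 * Per-SoC host port configuration tables, one 32-bit value per host port.
 * Explanatory note added here: these appear to be magic values carried over
 * from the Allwinner boot0 tables and are written verbatim into the HPCR
 * registers by mctl_configure_hostport() below; their exact bit meaning is
 * not documented.
 */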
static u32 hpcr_value[32] = {
#ifdef CONFIG_MACH_SUN5I
	0, 0, 0, 0,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0x1031, 0x1031, 0x0735, 0x1035,
	0x1035, 0x0731, 0x1031, 0,
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0x0301, 0
#endif
#ifdef CONFIG_MACH_SUN4I
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0, 0,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0x1031, 0x1031, 0x0735, 0x5031,
	0x1035, 0x0731, 0x1031, 0x0735,
	0x1035, 0x1031, 0x0731, 0x1035,
	0x1031, 0x0301, 0x0301, 0x0731
#endif
#ifdef CONFIG_MACH_SUN7I
	0x0301, 0x0301, 0x0301, 0x0301,
	0x0301, 0x0301, 0x0301, 0x0301,
	0, 0, 0, 0,
	0, 0, 0, 0,
	0x1031, 0x1031, 0x0735, 0x1035,
	0x1035, 0x0731, 0x1031, 0x0735,
	0x1035, 0x1031, 0x0731, 0x1035,
	0x0001, 0x1031, 0, 0x1031
	/* last row differs from boot0 source table
	 * 0x1031, 0x0301, 0x0301, 0x0731
	 * but boot0 code skips #28 and #30, and sets #29 and #31 to the
	 * value from #28 entry (0x1031)
	 */
#endif
};

static void mctl_configure_hostport(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 i;

	for (i = 0; i < 32; i++)
		writel(hpcr_value[i], &dram->hpcr[i]);
}

static void mctl_setup_dram_clock(u32 clk, u32 mbus_clk)
{
	u32 reg_val;
	struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
	u32 pll5p_clk, pll6x_clk;
	u32 pll5p_div, pll6x_div;
	u32 pll5p_rate, pll6x_rate;

	/* setup DRAM PLL */
	reg_val = readl(&ccm->pll5_cfg);
	reg_val &= ~CCM_PLL5_CTRL_M_MASK;	/* set M to 0 (x1) */
	reg_val &= ~CCM_PLL5_CTRL_K_MASK;	/* set K to 0 (x1) */
	reg_val &= ~CCM_PLL5_CTRL_N_MASK;	/* set N to 0 (x0) */
	reg_val &= ~CCM_PLL5_CTRL_P_MASK;	/* set P to 0 (x1) */
#ifdef CONFIG_OLD_SUNXI_KERNEL_COMPAT
	/* Old kernels are hardcoded to P=1 (divide by 2) */
	reg_val |= CCM_PLL5_CTRL_P(1);
#endif
	if (clk >= 540 && clk < 552) {
		/* dram = 540MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(15));
	} else if (clk >= 512 && clk < 528) {
		/* dram = 512MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(4));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(16));
	} else if (clk >= 496 && clk < 504) {
		/* dram = 496MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(3));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(31));
	} else if (clk >= 468 && clk < 480) {
		/* dram = 468MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(13));
	} else if (clk >= 396 && clk < 408) {
		/* dram = 396MHz */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(3));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(11));
	} else {
		/* any other frequency that is a multiple of 24 */
		reg_val |= CCM_PLL5_CTRL_M(CCM_PLL5_CTRL_M_X(2));
		reg_val |= CCM_PLL5_CTRL_K(CCM_PLL5_CTRL_K_X(2));
		reg_val |= CCM_PLL5_CTRL_N(CCM_PLL5_CTRL_N_X(clk / 24));
	}
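	/*
	 * Explanatory note (inferred from the factor tables above, not from
	 * official documentation): in every branch the chosen factors satisfy
	 * 24 MHz * N * K / M = clk, which is consistent with the usual PLL5
	 * output formula on these SoCs. E.g. for the 540 MHz case:
	 * 24 * 15 * 3 / 2 = 540.
	 */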
	reg_val &= ~CCM_PLL5_CTRL_VCO_GAIN;	/* PLL VCO Gain off */
	reg_val |= CCM_PLL5_CTRL_EN;		/* PLL On */
	writel(reg_val, &ccm->pll5_cfg);
	udelay(5500);

	setbits_le32(&ccm->pll5_cfg, CCM_PLL5_CTRL_DDR_CLK);

#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN7I)
	/* reset GPS */
	clrbits_le32(&ccm->gps_clk_cfg, CCM_GPS_CTRL_RESET | CCM_GPS_CTRL_GATE);
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
	udelay(1);
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_GPS);
#endif

	/* setup MBUS clock */
	if (!mbus_clk)
		mbus_clk = 300;

	/* PLL5P and PLL6 are the potential clock sources for MBUS */
	pll6x_clk = clock_get_pll6() / 1000000;
#ifdef CONFIG_MACH_SUN7I
	pll6x_clk *= 2; /* sun7i uses PLL6*2, sun5i uses just PLL6 */
#endif
	pll5p_clk = clock_get_pll5p() / 1000000;
	pll6x_div = DIV_ROUND_UP(pll6x_clk, mbus_clk);
	pll5p_div = DIV_ROUND_UP(pll5p_clk, mbus_clk);
	pll6x_rate = pll6x_clk / pll6x_div;
	pll5p_rate = pll5p_clk / pll5p_div;

	if (pll6x_div <= 16 && pll6x_rate > pll5p_rate) {
		/* use PLL6 as the MBUS clock source */
		reg_val = CCM_MBUS_CTRL_GATE |
			  CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL6) |
			  CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
			  CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll6x_div));
	} else if (pll5p_div <= 16) {
		/* use PLL5P as the MBUS clock source */
		reg_val = CCM_MBUS_CTRL_GATE |
			  CCM_MBUS_CTRL_CLK_SRC(CCM_MBUS_CTRL_CLK_SRC_PLL5) |
			  CCM_MBUS_CTRL_N(CCM_MBUS_CTRL_N_X(1)) |
			  CCM_MBUS_CTRL_M(CCM_MBUS_CTRL_M_X(pll5p_div));
	} else {
		panic("Bad mbus_clk\n");
	}
	writel(reg_val, &ccm->mbus_clk_cfg);
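	/*
	 * Illustrative example (hypothetical numbers, not from the original
	 * source): with mbus_clk = 300 and pll6x_clk = 600 MHz, pll6x_div
	 * becomes 2 and pll6x_rate 300 MHz, so PLL6 is selected and the MBUS
	 * ends up running at 600 / 2 = 300 MHz.
	 */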

	/*
	 * open DRAMC AHB & DLL register clock
	 * close it first
	 */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
#else
	clrbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
#endif
	udelay(22);

	/* then open it */
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM | CCM_AHB_GATE_DLL);
#else
	setbits_le32(&ccm->ahb_gate0, CCM_AHB_GATE_SDRAM);
#endif
	udelay(22);
}

/*
 * The data from rslrX and rdgrX registers (X=rank) is stored
 * in a single 32-bit value using the following format:
 * bits [31:26] - DQS gating system latency for byte lane 3
 * bits [25:24] - DQS gating phase select for byte lane 3
 * bits [23:18] - DQS gating system latency for byte lane 2
 * bits [17:16] - DQS gating phase select for byte lane 2
 * bits [15:10] - DQS gating system latency for byte lane 1
 * bits [ 9:8 ] - DQS gating phase select for byte lane 1
 * bits [ 7:2 ] - DQS gating system latency for byte lane 0
 * bits [ 1:0 ] - DQS gating phase select for byte lane 0
 */
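/*
 * Worked example, added for illustration only: a per-lane byte of 0x0e in
 * 'dqs_gating_delay' decodes to a system latency of (0x0e >> 2) & 7 = 3
 * cycles and a phase select of 0x0e & 3 = 2 for that byte lane.
 */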
static void mctl_set_dqs_gating_delay(int rank, u32 dqs_gating_delay)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 lane, number_of_lanes = mctl_get_number_of_lanes();
	/* rank0 gating system latency (3 bits per lane: cycles) */
	u32 slr = readl(rank == 0 ? &dram->rslr0 : &dram->rslr1);
	/* rank0 gating phase select (2 bits per lane: 90, 180, 270, 360) */
	u32 dgr = readl(rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
	for (lane = 0; lane < number_of_lanes; lane++) {
		u32 tmp = dqs_gating_delay >> (lane * 8);
		slr &= ~(7 << (lane * 3));
		slr |= ((tmp >> 2) & 7) << (lane * 3);
		dgr &= ~(3 << (lane * 2));
		dgr |= (tmp & 3) << (lane * 2);
	}
	writel(slr, rank == 0 ? &dram->rslr0 : &dram->rslr1);
	writel(dgr, rank == 0 ? &dram->rdgr0 : &dram->rdgr1);
}

static int dramc_scan_readpipe(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 reg_val;

	/* data training trigger */
	clrbits_le32(&dram->csr, DRAM_CSR_FAILED);
	setbits_le32(&dram->ccr, DRAM_CCR_DATA_TRAINING);

	/* check whether data training process has completed */
	await_bits_clear(&dram->ccr, DRAM_CCR_DATA_TRAINING);

	/* check data training result */
	reg_val = readl(&dram->csr);
	if (reg_val & DRAM_CSR_FAILED)
		return -1;

	return 0;
}

static void dramc_clock_output_en(u32 on)
{
#if defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I)
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	if (on)
		setbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
	else
		clrbits_le32(&dram->mcr, DRAM_MCR_DCLK_OUT);
#endif
#ifdef CONFIG_MACH_SUN4I
	struct sunxi_ccm_reg *ccm = (struct sunxi_ccm_reg *)SUNXI_CCM_BASE;
	if (on)
		setbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
	else
		clrbits_le32(&ccm->dram_clk_gate, CCM_DRAM_CTRL_DCLK_OUT);
#endif
}

/* tRFC in nanoseconds for different densities (from the DDR3 spec) */
static const u16 tRFC_DDR3_table[6] = {
	/* 256Mb  512Mb  1Gb  2Gb  4Gb  8Gb */
	90, 90, 110, 160, 300, 350
};

static void dramc_set_autorefresh_cycle(u32 clk, u32 density)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 tRFC, tREFI;

	tRFC = (tRFC_DDR3_table[density] * clk + 999) / 1000;
	tREFI = (7987 * clk) >> 10;	/* <= 7.8us */
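	/*
	 * Worked example, added for illustration only: for clk = 480 (MHz)
	 * and a 2 Gb chip (tRFC = 160 ns), tRFC rounds up to
	 * (160 * 480 + 999) / 1000 = 77 clock cycles and tREFI evaluates to
	 * (7987 * 480) >> 10 = 3743 cycles, i.e. just under 7.8 us.
	 */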

	writel(DRAM_DRR_TREFI(tREFI) | DRAM_DRR_TRFC(tRFC), &dram->drr);
}

/* Calculate the value for A11, A10, A9 bits in MR0 (write recovery) */
static u32 ddr3_write_recovery(u32 clk)
{
	u32 twr_ns = 15; /* DDR3 spec says that it is 15ns for all speed bins */
	u32 twr_ck = (twr_ns * clk + 999) / 1000;
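	/*
	 * Example, added for illustration only: at clk = 480 (MHz) this
	 * rounds 15 ns up to twr_ck = 8 clock cycles, so the function
	 * returns 8 - 4 = 4 below.
	 */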
	if (twr_ck < 5)
		return 1;
	else if (twr_ck <= 8)
		return twr_ck - 4;
	else if (twr_ck <= 10)
		return 5;
	else
		return 6;
}

/*
 * If the dram->ppwrsctl (SDR_DPCR) register has the lowest bit set to 1, this
 * means that DRAM is currently in self-refresh mode and retaining the old
 * data. Since we have no idea what to do in this situation yet, just set this
 * register to 0 and initialize DRAM in the same way as on any normal reboot
 * (discarding whatever was stored there).
 *
 * Note: on sun7i hardware, the highest 16 bits need to be set to 0x1651 magic
 * value for this write operation to have any effect. On sun5i hardware this
 * magic value is not necessary. And on sun4i hardware the writes to this
 * register seem to have no effect at all.
 */
static void mctl_disable_power_save(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	writel(0x16510000, &dram->ppwrsctl);
}

/*
 * After the DRAM is powered up or reset, the DDR3 spec requires waiting at
 * least 500 us before driving the CKE pin (Clock Enable) high. The dram->idcr
 * (SDR_IDCR) register appears to configure this delay, which gets applied
 * right at the time when the DRAM initialization is activated in the
 * 'mctl_ddr3_initialize' function.
 */
static void mctl_set_cke_delay(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;

	/* The CKE delay is represented in DRAM clock cycles, multiplied by N
	 * (where N=2 for sun4i/sun5i and N=3 for sun7i). Here it is set to
	 * the maximum possible value 0x1ffff, just like in the Allwinner's
	 * boot0 bootloader. The resulting delay value is somewhere between
	 * ~0.4 ms (sun5i with 648 MHz DRAM clock speed) and ~1.1 ms (sun7i
	 * with 360 MHz DRAM clock speed). */
	setbits_le32(&dram->idcr, 0x1ffff);
}

/*
 * This triggers the DRAM initialization. Among other things, it sends the
 * mode register values to the DRAM. Very likely the ZQCL command is also
 * getting executed (to do the initial impedance calibration on the DRAM side
 * of the wire). The memory controller and the PHY must be already configured
 * before calling this function.
 */
static void mctl_ddr3_initialize(void)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	setbits_le32(&dram->ccr, DRAM_CCR_INIT);
	await_bits_clear(&dram->ccr, DRAM_CCR_INIT);
}

/*
 * Perform impedance calibration on the DRAM controller side of the wire.
 */
static void mctl_set_impedance(u32 zq, bool odt_en)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 reg_val;
	u32 zprog = zq & 0xFF, zdata = (zq >> 8) & 0xFFFFF;
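	/*
	 * Field layout of 'zq', inferred from the code below rather than any
	 * documentation: bits [7:0] (zprog) select the impedance divider used
	 * for calibration against the external resistor, while bits [27:8]
	 * (zdata), if non-zero, provide pre-computed impedance data that is
	 * programmed directly and skips the calibration step.
	 */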

#ifndef CONFIG_MACH_SUN7I
	/* Appears that some kind of automatically initiated default
	 * ZQ calibration is already in progress at this point on sun4i/sun5i
	 * hardware, but not on sun7i. So it is reasonable to wait for its
	 * completion before doing anything else. */
	await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
#endif

	/* ZQ calibration is not really useful unless ODT is enabled */
	if (!odt_en)
		return;

#ifdef CONFIG_MACH_SUN7I
	/* Enabling ODT in SDR_IOCR on sun7i hardware results in a deadlock
	 * unless bit 24 is set in SDR_ZQCR1. Not much is known about the
	 * SDR_ZQCR1 register, but there are hints indicating that it might
	 * be related to periodic impedance re-calibration. This particular
	 * magic value is borrowed from the Allwinner boot0 bootloader, and
	 * using it helps to avoid trouble */
	writel((1 << 24) | (1 << 1), &dram->zqcr1);
#endif

	/* Needed at least for sun5i, because it does not self clear there */
	clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

	if (zdata) {
		/* Set the user supplied impedance data */
		reg_val = DRAM_ZQCR0_ZDEN | zdata;
		writel(reg_val, &dram->zqcr0);
		/* no need to wait, this takes effect immediately */
	} else {
		/* Do the calibration using the external resistor */
		reg_val = DRAM_ZQCR0_ZCAL | DRAM_ZQCR0_IMP_DIV(zprog);
		writel(reg_val, &dram->zqcr0);
		/* Wait for the new impedance configuration to settle */
		await_bits_set(&dram->zqsr, DRAM_ZQSR_ZDONE);
	}

	/* Needed at least for sun5i, because it does not self clear there */
	clrbits_le32(&dram->zqcr0, DRAM_ZQCR0_ZCAL);

	/* Set I/O configure register */
	writel(DRAM_IOCR_ODT_EN, &dram->iocr);
}

static unsigned long dramc_init_helper(struct dram_para *para)
{
	struct sunxi_dram_reg *dram = (struct sunxi_dram_reg *)SUNXI_DRAMC_BASE;
	u32 reg_val;
	u32 density;
	int ret_val;

	/*
	 * only single rank DDR3 is supported by this code even though the
	 * hardware can theoretically support DDR2 and up to two ranks
	 */
	if (para->type != DRAM_MEMORY_TYPE_DDR3 || para->rank_num != 1)
		return 0;

	/* set up the DRAM-related clocks */
	mctl_setup_dram_clock(para->clock, para->mbus_clock);

	/* Disable any pad power save control */
	mctl_disable_power_save();

	mctl_set_drive();

	/* dram clock off */
	dramc_clock_output_en(0);

#ifdef CONFIG_MACH_SUN4I
	/* select dram controller 1 */
	writel(DRAM_CSEL_MAGIC, &dram->csel);
#endif

	mctl_itm_disable();
	mctl_enable_dll0(para->tpr3);

	/* configure external DRAM */
	reg_val = DRAM_DCR_TYPE_DDR3;
	reg_val |= DRAM_DCR_IO_WIDTH(para->io_width >> 3);

	if (para->density == 256)
		density = DRAM_DCR_CHIP_DENSITY_256M;
	else if (para->density == 512)
		density = DRAM_DCR_CHIP_DENSITY_512M;
	else if (para->density == 1024)
		density = DRAM_DCR_CHIP_DENSITY_1024M;
	else if (para->density == 2048)
		density = DRAM_DCR_CHIP_DENSITY_2048M;
	else if (para->density == 4096)
		density = DRAM_DCR_CHIP_DENSITY_4096M;
	else if (para->density == 8192)
		density = DRAM_DCR_CHIP_DENSITY_8192M;
	else
		density = DRAM_DCR_CHIP_DENSITY_256M;

	reg_val |= DRAM_DCR_CHIP_DENSITY(density);
	reg_val |= DRAM_DCR_BUS_WIDTH((para->bus_width >> 3) - 1);
	reg_val |= DRAM_DCR_RANK_SEL(para->rank_num - 1);
	reg_val |= DRAM_DCR_CMD_RANK_ALL;
	reg_val |= DRAM_DCR_MODE(DRAM_DCR_MODE_INTERLEAVE);
	writel(reg_val, &dram->dcr);

	dramc_clock_output_en(1);

	mctl_set_impedance(para->zq, para->odt_en);

	mctl_set_cke_delay();

	mctl_ddr3_reset();

	udelay(1);

	await_bits_clear(&dram->ccr, DRAM_CCR_INIT);

	mctl_enable_dllx(para->tpr3);

	/* set refresh period */
	dramc_set_autorefresh_cycle(para->clock, density);

	/* set timing parameters */
	writel(para->tpr0, &dram->tpr0);
	writel(para->tpr1, &dram->tpr1);
	writel(para->tpr2, &dram->tpr2);

	reg_val = DRAM_MR_BURST_LENGTH(0x0);
#if (defined(CONFIG_MACH_SUN5I) || defined(CONFIG_MACH_SUN7I))
	reg_val |= DRAM_MR_POWER_DOWN;
#endif
	reg_val |= DRAM_MR_CAS_LAT(para->cas - 4);
	reg_val |= DRAM_MR_WRITE_RECOVERY(ddr3_write_recovery(para->clock));
	writel(reg_val, &dram->mr);

	writel(para->emr1, &dram->emr);
	writel(para->emr2, &dram->emr2);
	writel(para->emr3, &dram->emr3);

	/* disable drift compensation and set passive DQS window mode */
	clrsetbits_le32(&dram->ccr, DRAM_CCR_DQS_DRIFT_COMP, DRAM_CCR_DQS_GATE);

#ifdef CONFIG_MACH_SUN7I
	/* Command rate timing mode 2T & 1T */
	if (para->tpr4 & 0x1)
		setbits_le32(&dram->ccr, DRAM_CCR_COMMAND_RATE_1T);
#endif
	/* initialize external DRAM */
	mctl_ddr3_initialize();

	/* scan read pipe value */
	mctl_itm_enable();

	/* Hardware DQS gate training */
	ret_val = dramc_scan_readpipe();

	if (ret_val < 0)
		return 0;

	/* allow overriding the DQS training results with a custom delay */
	if (para->dqs_gating_delay)
		mctl_set_dqs_gating_delay(0, para->dqs_gating_delay);

	/* set the DQS gating window type */
	if (para->active_windowing)
		clrbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);
	else
		setbits_le32(&dram->ccr, DRAM_CCR_DQS_GATE);

	mctl_itm_reset();

	/* configure all host ports */
	mctl_configure_hostport();

	return get_ram_size((long *)PHYS_SDRAM_0, PHYS_SDRAM_0_SIZE);
}

unsigned long dramc_init(struct dram_para *para)
{
	unsigned long dram_size, actual_density;

	/* bail out if no dram configuration is provided */
	if (!para)
		return 0;

	/* if everything is known, then autodetection is not necessary */
	if (para->io_width && para->bus_width && para->density)
		return dramc_init_helper(para);

	/* try to autodetect the DRAM bus width and density */
	para->io_width = 16;
	para->bus_width = 32;
#if defined(CONFIG_MACH_SUN4I) || defined(CONFIG_MACH_SUN5I)
	/* only A0-A14 address lines on A10/A13, limiting max density to 4096 */
	para->density = 4096;
#else
	/* all A0-A15 address lines on A20, which allows density 8192 */
	para->density = 8192;
#endif

	dram_size = dramc_init_helper(para);
	if (!dram_size) {
		/* if 32-bit bus width failed, try 16-bit bus width instead */
		para->bus_width = 16;
		dram_size = dramc_init_helper(para);
		if (!dram_size) {
			/* if 16-bit bus width also failed, then bail out */
			return dram_size;
		}
	}

	/* check if we need to adjust the density */
	actual_density = (dram_size >> 17) * para->io_width / para->bus_width;

	if (actual_density != para->density) {
		/* update the density and re-initialize DRAM again */
		para->density = actual_density;
		dram_size = dramc_init_helper(para);
	}

	return dram_size;
}