/* SPDX-License-Identifier: (GPL-2.0+ OR MIT) */
/*
 * Copyright (c) 2018 Microsemi Corporation
 */

#ifndef __ASM_MACH_DDR_H
#define __ASM_MACH_DDR_H

#include <asm/cacheops.h>
#include <asm/io.h>
#include <asm/reboot.h>
#include <linux/bitops.h>
#include <mach/common.h>

#define MIPS_VCOREIII_MEMORY_DDR3
#define MIPS_VCOREIII_DDR_SIZE CONFIG_SYS_SDRAM_SIZE

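/*
 * The VC3_MPAR_* values below are the per-device DDR timing parameters,
 * expressed in memory-clock cycles at the clock period noted for each
 * device (e.g. tREFI 2437 * 3.2 ns ~= 7.8 us refresh interval).
 */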
#if defined(CONFIG_DDRTYPE_H5TQ1G63BFA) /* Serval1 Refboard */

/* Hynix H5TQ1G63BFA (1Gbit DDR3, x16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 13
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 35
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 38
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41J128M16HA) /* Validation board */

/* Micron MT41J128M16HA-15E:D (2Gbit DDR3, x16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 5
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 50
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 54
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41K256M16) /* JR2 Validation board */

/* Micron MT41K256M16 (4Gbit, DDR3L-800, 256Mbitx16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 15
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 5
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_H5TQ4G63MFR) /* JR2 Reference board */

/* Hynix H5TQ4G63MFR-PBC (4Gbit, DDR3-800, 256Mbitx16) - 2kb pages @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 15
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 17
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41K128M16JT)

/* Micron MT41K128M16JT-125 (2Gbit DDR3L, 128Mbitx16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT47H128M8HQ) /* Luton10/26 Refboards */

/* Micron 1Gb MT47H128M8-3 16Meg x 8 x 8 banks, DDR-533@CL4 @ 4.80ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 1625
#define VC3_MPAR_tRAS_min 9
#define VC3_MPAR_CL 4
#define VC3_MPAR_tWTR 2
#define VC3_MPAR_tRC 12
#define VC3_MPAR_tFAW 8
#define VC3_MPAR_tRP 4
#define VC3_MPAR_tRRD 2
#define VC3_MPAR_tRCD 4

#define VC3_MPAR_tRPA 4
#define VC3_MPAR_tRP 4

#define VC3_MPAR_tMRD 2
#define VC3_MPAR_tRFC 27

#define VC3_MPAR__400_ns_dly 84

#define VC3_MPAR_tWR 4
#undef MIPS_VCOREIII_MEMORY_DDR3
#else

#error Unknown DDR system configuration - please add!

#endif

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
#define MIPS_VCOREIII_MEMORY_16BIT 1
#endif

#define MIPS_VCOREIII_MEMORY_SSTL_ODT 7
#define MIPS_VCOREIII_MEMORY_SSTL_DRIVE 7
#define VCOREIII_DDR_DQS_MODE_CALIBRATE

#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_16BIT 1
#else
#define VC3_MPAR_16BIT 0
#endif

#ifdef MIPS_VCOREIII_MEMORY_DDR3
#define VC3_MPAR_DDR3_MODE 1 /* DDR3 */
#define VC3_MPAR_BURST_LENGTH 8 /* Always 8 (1) for DDR3 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_BURST_SIZE 1 /* Always 1 for DDR3/16bit */
#else
#define VC3_MPAR_BURST_SIZE 0
#endif
#else
#define VC3_MPAR_DDR3_MODE 0 /* DDR2 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_BURST_LENGTH 4 /* in DDR2 16-bit mode, use burstlen 4 */
#else
#define VC3_MPAR_BURST_LENGTH 8 /* For 8-bit IF we must run burst-8 */
#endif
#define VC3_MPAR_BURST_SIZE 0 /* Always 0 for DDR2 */
#endif

#define VC3_MPAR_RL VC3_MPAR_CL
#if !defined(MIPS_VCOREIII_MEMORY_DDR3)
#define VC3_MPAR_WL (VC3_MPAR_RL - 1)
#define VC3_MPAR_MD VC3_MPAR_tMRD
#define VC3_MPAR_ID VC3_MPAR__400_ns_dly
#define VC3_MPAR_SD VC3_MPAR_tXSRD
#define VC3_MPAR_OW (VC3_MPAR_WL - 2)
#define VC3_MPAR_OR (VC3_MPAR_WL - 3)
#define VC3_MPAR_RP (VC3_MPAR_bank_addr_cnt < 3 ? VC3_MPAR_tRP : VC3_MPAR_tRPA)
#define VC3_MPAR_FAW (VC3_MPAR_bank_addr_cnt < 3 ? 1 : VC3_MPAR_tFAW)
#define VC3_MPAR_BL (VC3_MPAR_BURST_LENGTH == 4 ? 2 : 4)
#define MSCC_MEMPARM_MR0 \
	(VC3_MPAR_BURST_LENGTH == 8 ? 3 : 2) | (VC3_MPAR_CL << 4) | \
	((VC3_MPAR_tWR - 1) << 9)
/* DLL-on, Full-OD, AL=0, RTT=off, nDQS-on, RDQS-off, out-en */
#define MSCC_MEMPARM_MR1 0x382
#define MSCC_MEMPARM_MR2 0
#define MSCC_MEMPARM_MR3 0
#else
#define VC3_MPAR_WL VC3_MPAR_CWL
#define VC3_MPAR_MD VC3_MPAR_tMOD
#define VC3_MPAR_ID VC3_MPAR_tXPR
#define VC3_MPAR_SD VC3_MPAR_tDLLK
#define VC3_MPAR_OW 2
#define VC3_MPAR_OR 2
#define VC3_MPAR_RP VC3_MPAR_tRP
#define VC3_MPAR_FAW VC3_MPAR_tFAW
#define VC3_MPAR_BL 4
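/*
 * DDR3 mode register values: MR0 packs the CAS latency ((RL - 4) << 4)
 * and write recovery ((tWR - 4) << 9), MR2 packs the CAS write latency
 * ((WL - 5) << 3), and MR1 selects the DRAM ODT termination (see below).
 */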
#define MSCC_MEMPARM_MR0 ((VC3_MPAR_RL - 4) << 4) | ((VC3_MPAR_tWR - 4) << 9)
/* ODT_RTT: 0x0040 for 120 ohm, and 0x0004 for 60 ohm. */
#define MSCC_MEMPARM_MR1 0x0040
#define MSCC_MEMPARM_MR2 ((VC3_MPAR_WL - 5) << 3)
#define MSCC_MEMPARM_MR3 0
#endif /* MIPS_VCOREIII_MEMORY_DDR3 */

#define MSCC_MEMPARM_MEMCFG \
	((MIPS_VCOREIII_DDR_SIZE > SZ_512M) ? \
	 ICPU_MEMCTRL_CFG_DDR_512MBYTE_PLUS : 0) | \
	(VC3_MPAR_16BIT ? ICPU_MEMCTRL_CFG_DDR_WIDTH : 0) | \
	(VC3_MPAR_DDR3_MODE ? ICPU_MEMCTRL_CFG_DDR_MODE : 0) | \
	(VC3_MPAR_BURST_SIZE ? ICPU_MEMCTRL_CFG_BURST_SIZE : 0) | \
	(VC3_MPAR_BURST_LENGTH == 8 ? ICPU_MEMCTRL_CFG_BURST_LEN : 0) | \
	(VC3_MPAR_bank_addr_cnt == 3 ? ICPU_MEMCTRL_CFG_BANK_CNT : 0) | \
	ICPU_MEMCTRL_CFG_MSB_ROW_ADDR(VC3_MPAR_row_addr_cnt - 1) | \
	ICPU_MEMCTRL_CFG_MSB_COL_ADDR(VC3_MPAR_col_addr_cnt - 1)

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
#define MSCC_MEMPARM_PERIOD \
	ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(8) | \
	ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0 \
	ICPU_MEMCTRL_TIMING0_RD_TO_WR_DLY(VC3_MPAR_RL + VC3_MPAR_BL + 1 - \
					  VC3_MPAR_WL) | \
	ICPU_MEMCTRL_TIMING0_WR_CS_CHANGE_DLY(VC3_MPAR_BL - 1) | \
	ICPU_MEMCTRL_TIMING0_RD_CS_CHANGE_DLY(VC3_MPAR_BL) | \
	ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) | \
	ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_WL + \
					     VC3_MPAR_BL + \
					     VC3_MPAR_tWR - 1) | \
	ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BL - 1) | \
	ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_WL - 1) | \
	ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_RL - 3)

#define MSCC_MEMPARM_TIMING1 \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
	ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_FAW - 1) | \
	ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_RP - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) | \
	ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_WL + \
					  VC3_MPAR_BL + \
					  VC3_MPAR_tWTR - 1)

#define MSCC_MEMPARM_TIMING2 \
	ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_RP - 1) | \
	ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_MD - 1) | \
	ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) | \
	ICPU_MEMCTRL_TIMING2_INIT_DLY(VC3_MPAR_ID - 1)

#define MSCC_MEMPARM_TIMING3 \
	ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_WL + \
						    VC3_MPAR_tWTR - 1) |\
	ICPU_MEMCTRL_TIMING3_ODT_RD_DLY(VC3_MPAR_OR - 1) | \
	ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_OW - 1) | \
	ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_RL - 3)

#else
#define MSCC_MEMPARM_PERIOD \
	ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(1) | \
	ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0 \
	ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) | \
	ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_CL + \
					     (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
					     VC3_MPAR_tWR) | \
	ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BURST_LENGTH == 8 ? 3 : 1) | \
	ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_CL - 3) | \
	ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_CL - 3)

#define MSCC_MEMPARM_TIMING1 \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
	ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_tFAW - 1) | \
	ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_tRP - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) | \
	ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_CL + \
					  (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
					  VC3_MPAR_tWTR)
#define MSCC_MEMPARM_TIMING2 \
	ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_tRPA - 1) | \
	ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_tMRD - 1) | \
	ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) | \
	ICPU_MEMCTRL_TIMING2_FOUR_HUNDRED_NS_DLY(VC3_MPAR__400_ns_dly)

#define MSCC_MEMPARM_TIMING3 \
	ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_CL - 1) | \
	ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_CL - 1) | \
	ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_CL - 1)

#endif

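/* Result codes used by the byte-lane DQS training loops below */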
enum {
	DDR_TRAIN_OK,
	DDR_TRAIN_CONTINUE,
	DDR_TRAIN_ERROR,
};

/*
 * We actually have very few 'pause' possibilities apart from
 * these assembly nops (at this very early stage).
 */
#define PAUSE() asm volatile("nop; nop; nop; nop; nop; nop; nop; nop")

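/*
 * Helpers for manipulating the per-byte-lane DQS delay (a 0..31 tap
 * value in ICPU_MEMCTRL_DQS_DLY) used by the training code below.
 */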
/* NB: Assumes inlining as no stack is available! */
static inline void set_dly(u32 bytelane, u32 dly)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));

	r &= ~ICPU_MEMCTRL_DQS_DLY_DQS_DLY_M;
	r |= ICPU_MEMCTRL_DQS_DLY_DQS_DLY(dly);
	writel(r, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
}

static inline bool incr_dly(u32 bytelane)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));

	if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
		writel(r + 1, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
		return true;
	}

	return false;
}

static inline bool adjust_dly(int adjust)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));

	if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
		writel(r + adjust, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));
		return true;
	}

	return false;
}

/* NB: Assumes inlining as no stack is available! */
static inline void center_dly(u32 bytelane, u32 start)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane)) - start;

	writel(start + (r >> 1), BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
}

static inline void memphy_soft_reset(void)
{
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
	PAUSE();
	clrbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
	PAUSE();
}

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
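/*
 * Expected per-byte-lane pattern; matches the two low bytes of each
 * training word written to DDR by hal_vcoreiii_wait_memctl() below.
 */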
static u8 training_data[] = { 0xfe, 0x11, 0x33, 0x55, 0x77, 0x99, 0xbb, 0xdd };

static inline void sleep_100ns(u32 val)
{
	/* Set the timer tick generator to 100 ns */
	writel(VCOREIII_TIMER_DIVIDER - 1, BASE_CFG + ICPU_TIMER_TICK_DIV);

	/* Set the timer value */
	writel(val, BASE_CFG + ICPU_TIMER_VALUE(0));

	/* Enable timer 0 for one-shot */
	writel(ICPU_TIMER_CTRL_ONE_SHOT_ENA | ICPU_TIMER_CTRL_TIMER_ENA,
	       BASE_CFG + ICPU_TIMER_CTRL(0));

	/* Wait for timer 0 to reach 0 */
	while (readl(BASE_CFG + ICPU_TIMER_VALUE(0)) != 0)
		;
}

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_SERVAL)
/*
 * DDR memory sanity checking failed, tally and do hard reset
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_failed(void)
{
	register u32 reset;

#if defined(CONFIG_SOC_OCELOT)
	writel(readl(BASE_CFG + ICPU_GPR(6)) + 1, BASE_CFG + ICPU_GPR(6));

	clrbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
#endif

	/* We have to execute the reset function from cache. Indeed,
	 * the reboot workaround in _machine_restart() will change the
	 * SPI NOR into SW bitbang.
	 *
	 * This will render the CPU unable to execute directly from
	 * the NOR, which is why the reset instructions are prefetched
	 * into the I-cache.
	 *
	 * When failing the DDR initialization we are executing from
	 * NOR.
	 *
	 * The last instruction in _machine_restart() will reset the
	 * MIPS CPU (and the cache), and the CPU will start executing
	 * from the reset vector.
	 */
	reset = KSEG0ADDR(_machine_restart);
	icache_lock((void *)reset, 128);
	asm volatile ("jr %0"::"r" (reset));
}
#else /* JR2 || ServalT */
static inline void hal_vcoreiii_ddr_failed(void)
{
	writel(0, BASE_CFG + ICPU_RESET);
	writel(PERF_SOFT_RST_SOFT_CHIP_RST, BASE_CFG + PERF_SOFT_RST);
}
#endif

#if defined(CONFIG_SOC_OCELOT)
static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	/* The DDR reset pin is on GPIO 19; toggle it low-high to release */
	setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
	writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_CLR);
	sleep_100ns(10000);
}

static inline void hal_vcoreiii_ddr_reset_release(void)
{
	/* The DDR reset pin is on GPIO 19; toggle it low-high to release */
	setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
	writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_SET);
	sleep_100ns(10000);
}

#else /* JR2 || ServalT || Serval */
static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	/* Ensure the memory controller physical iface is forced reset */
	writel(readl(BASE_CFG + ICPU_MEMPHY_CFG) |
	       ICPU_MEMPHY_CFG_PHY_RST, BASE_CFG + ICPU_MEMPHY_CFG);

	/* Ensure the memory controller is forced reset */
	writel(readl(BASE_CFG + ICPU_RESET) |
	       ICPU_RESET_MEM_RST_FORCE, BASE_CFG + ICPU_RESET);
}
#endif /* JR2 || ServalT || Serval */

/*
 * DDR memory sanity checking done, possibly enable ECC.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_verified(void)
{
#ifdef MIPS_VCOREIII_MEMORY_ECC
	/* Finally, enable ECC */
	register u32 val = readl(BASE_CFG + ICPU_MEMCTRL_CFG);

	val |= ICPU_MEMCTRL_CFG_DDR_ECC_ERR_ENA;
	val &= ~ICPU_MEMCTRL_CFG_BURST_SIZE;

	writel(val, BASE_CFG + ICPU_MEMCTRL_CFG);
#endif

	/* Reset Status register - sticky bits */
	writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT), BASE_CFG + ICPU_MEMCTRL_STAT);
}

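/*
 * Step the DQS delay upward (DDR_TRAIN_CONTINUE) until the whole training
 * pattern reads back correctly, i.e. find the start of the data eye for
 * this byte lane.
 */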
/* NB: Assumes inlining as no stack is available! */
static inline int look_for(u32 bytelane)
{
	register u32 i;

	/* Reset FIFO in case any previous access failed */
	for (i = 0; i < sizeof(training_data); i++) {
		register u32 byte;

		memphy_soft_reset();
		/* Reset sticky bits */
		writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
		       BASE_CFG + ICPU_MEMCTRL_STAT);
		/* Read data */
		byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
				   (i * 4));

		/*
		 * Prevent the compiler from reordering the instructions
		 * so the check of the errors happens after the read of
		 * RAM.
		 */
		rmb();
		if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
		    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
		     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
			/* Noise on the line */
			goto read_error;
		}
		/* If mismatch, increment DQS - if possible */
		if (byte != training_data[i]) {
read_error:
			if (!incr_dly(bytelane))
				return DDR_TRAIN_ERROR;
			return DDR_TRAIN_CONTINUE;
		}
	}
	return DDR_TRAIN_OK;
}

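/*
 * Scan past the data eye: return DDR_TRAIN_CONTINUE (delay bumped) while
 * the pattern still reads back correctly, and DDR_TRAIN_OK on the first
 * failure, i.e. the end of the data eye for this byte lane.
 */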
/* NB: Assumes inlining as no stack is available! */
static inline int look_past(u32 bytelane)
{
	register u32 i;

	/* Reset FIFO in case any previous access failed */
	for (i = 0; i < sizeof(training_data); i++) {
		register u32 byte;

		memphy_soft_reset();
		/* Ack sticky bits */
		writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
		       BASE_CFG + ICPU_MEMCTRL_STAT);
		byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
				   (i * 4));
		/*
		 * Prevent the compiler from reordering the instructions
		 * so the check of the errors happens after the read of
		 * RAM.
		 */
		rmb();
		if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
		    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
		     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
			/* Noise on the line */
			goto read_error;
		}
		/* Bail out when we see first mismatch */
		if (byte != training_data[i]) {
read_error:
			return DDR_TRAIN_OK;
		}
	}
	/* All data compares OK, increase DQS and retry */
	if (!incr_dly(bytelane))
		return DDR_TRAIN_ERROR;

	return DDR_TRAIN_CONTINUE;
}

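/*
 * Train one byte lane: find the start and end of the DQS data eye with
 * look_for()/look_past(), then center the delay inside that window.
 */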
static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
{
	register int res;
	register u32 dqs_s;

	set_dly(bytelane, 0); /* Start training at DQS=0 */
	while ((res = look_for(bytelane)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;

	dqs_s = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
	while ((res = look_past(bytelane)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;
	/* Reset FIFO - for good measure */
	memphy_soft_reset();
	/* Adjust to center [dqs_s;cur] */
	center_dly(bytelane, dqs_s);
	return DDR_TRAIN_OK;
}

/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_init_dqs(void)
{
#define MAX_DQS 32
	register u32 i, j;

	for (i = 0; i < MAX_DQS; i++) {
		set_dly(0, i); /* Byte-lane 0 */
		for (j = 0; j < MAX_DQS; j++) {
			__maybe_unused register u32 byte;

			set_dly(1, j); /* Byte-lane 1 */
			/* Reset FIFO in case any previous access failed */
			memphy_soft_reset();
			writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
			       BASE_CFG + ICPU_MEMCTRL_STAT);
			byte = __raw_readb((void __iomem *)MSCC_DDR_TO);
			byte = __raw_readb((void __iomem *)(MSCC_DDR_TO + 1));
			if (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
			      (ICPU_MEMCTRL_STAT_RDATA_MASKED |
			       ICPU_MEMCTRL_STAT_RDATA_DUMMY)))
				return 0;
		}
	}
	return -1;
}

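/* Simple write/read-back sanity check of the first eight words of DDR */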
static inline int dram_check(void)
{
	register u32 i;

	for (i = 0; i < 8; i++) {
		__raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));
		if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
			return 1;
	}
	return 0;
}
#else /* Luton */

static inline void sleep_100ns(u32 val)
{
}

static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_RST);
	setbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);
}

static inline void hal_vcoreiii_ddr_reset_release(void)
{
}

static inline void hal_vcoreiii_ddr_failed(void)
{
	register u32 memphy_cfg = readl(BASE_CFG + ICPU_MEMPHY_CFG);

	/* Do a fifo reset and start over */
	writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
	writel(memphy_cfg & ~ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
	writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
}

static inline void hal_vcoreiii_ddr_verified(void)
{
}

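/*
 * Luton: read back a single byte from DDR and compare it against the
 * expected pattern byte, bumping the DQS delay on mismatch.
 */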
static inline int look_for(u32 data)
{
	register u32 byte = __raw_readb((void __iomem *)MSCC_DDR_TO);

	if (data != byte) {
		if (!incr_dly(0))
			return DDR_TRAIN_ERROR;
		return DDR_TRAIN_CONTINUE;
	}

	return DDR_TRAIN_OK;
}

/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
{
	register int res;

	set_dly(bytelane, 0); /* Start training at DQS=0 */
	while ((res = look_for(0xff)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;

	set_dly(bytelane, 0); /* Start training at DQS=0 */
	while ((res = look_for(0x00)) == DDR_TRAIN_CONTINUE)
		;

	if (res != DDR_TRAIN_OK)
		return res;

	adjust_dly(-3);

	return DDR_TRAIN_OK;
}

static inline int hal_vcoreiii_init_dqs(void)
{
	return 0;
}

static inline int dram_check(void)
{
	register u32 i;

	for (i = 0; i < 8; i++) {
		__raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));

		if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
			return 1;
	}

	return 0;
}
#endif

/*
 * NB: Called *early* to init memory controller - assumes inlining as
 * no stack is available!
 */
static inline void hal_vcoreiii_init_memctl(void)
{
	/* Ensure DDR is in reset */
	hal_vcoreiii_ddr_reset_assert();

	/* Wait maybe not needed, but ... */
	PAUSE();

	/* Drop sys ctl memory controller forced reset */
	clrbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);

	PAUSE();

	/* Drop Reset, enable SSTL */
	writel(ICPU_MEMPHY_CFG_PHY_SSTL_ENA, BASE_CFG + ICPU_MEMPHY_CFG);
	PAUSE();

	/* Start the automatic SSTL output and ODT drive-strength calibration */
	writel(ICPU_MEMPHY_ZCAL_ZCAL_PROG_ODT(MIPS_VCOREIII_MEMORY_SSTL_ODT) |
	       /* drive strength */
	       ICPU_MEMPHY_ZCAL_ZCAL_PROG(MIPS_VCOREIII_MEMORY_SSTL_DRIVE) |
	       /* Start calibration process */
	       ICPU_MEMPHY_ZCAL_ZCAL_ENA, BASE_CFG + ICPU_MEMPHY_ZCAL);

	/* Wait for ZCAL to clear */
	while (readl(BASE_CFG + ICPU_MEMPHY_ZCAL) & ICPU_MEMPHY_ZCAL_ZCAL_ENA)
		;
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT)
	/* Check no ZCAL_ERR */
	if (readl(BASE_CFG + ICPU_MEMPHY_ZCAL_STAT)
	    & ICPU_MEMPHY_ZCAL_STAT_ZCAL_ERR)
		hal_vcoreiii_ddr_failed();
#endif
	/* Drive CL, CK, ODT */
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_ODT_OE |
		     ICPU_MEMPHY_CFG_PHY_CK_OE | ICPU_MEMPHY_CFG_PHY_CL_OE);

	/* Initialize memory controller */
	writel(MSCC_MEMPARM_MEMCFG, BASE_CFG + ICPU_MEMCTRL_CFG);
	writel(MSCC_MEMPARM_PERIOD, BASE_CFG + ICPU_MEMCTRL_REF_PERIOD);

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
	writel(MSCC_MEMPARM_TIMING0, BASE_CFG + ICPU_MEMCTRL_TIMING0);
#else /* Luton */
	clrbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, ((1 << 20) - 1));
	setbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, MSCC_MEMPARM_TIMING0);
#endif

	writel(MSCC_MEMPARM_TIMING1, BASE_CFG + ICPU_MEMCTRL_TIMING1);
	writel(MSCC_MEMPARM_TIMING2, BASE_CFG + ICPU_MEMCTRL_TIMING2);
	writel(MSCC_MEMPARM_TIMING3, BASE_CFG + ICPU_MEMCTRL_TIMING3);
	writel(MSCC_MEMPARM_MR0, BASE_CFG + ICPU_MEMCTRL_MR0_VAL);
	writel(MSCC_MEMPARM_MR1, BASE_CFG + ICPU_MEMCTRL_MR1_VAL);
	writel(MSCC_MEMPARM_MR2, BASE_CFG + ICPU_MEMCTRL_MR2_VAL);
	writel(MSCC_MEMPARM_MR3, BASE_CFG + ICPU_MEMCTRL_MR3_VAL);

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_SERVAL)
	/* Termination setup - enable ODT */
	writel(ICPU_MEMCTRL_TERMRES_CTRL_LOCAL_ODT_RD_ENA |
	       /* Assert ODT0 for any write */
	       ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
	       BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

	/* Release Reset from DDR */
#if defined(CONFIG_SOC_OCELOT)
	hal_vcoreiii_ddr_reset_release();
#endif

	writel(readl(BASE_CFG + ICPU_GPR(7)) + 1, BASE_CFG + ICPU_GPR(7));
#elif defined(CONFIG_SOC_JR2) || defined(CONFIG_SOC_SERVALT)
	writel(ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
	       BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);
#else /* Luton */
	/* Termination setup - disable ODT */
	writel(0, BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

#endif
}

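/*
 * Kick off controller initialization, wait for INIT_DONE, and then seed
 * DDR with the pattern used by the byte-lane training code above.
 */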
static inline void hal_vcoreiii_wait_memctl(void)
{
	/* Now, rip it! */
	writel(ICPU_MEMCTRL_CTRL_INITIALIZE, BASE_CFG + ICPU_MEMCTRL_CTRL);

	while (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT)
		 & ICPU_MEMCTRL_STAT_INIT_DONE))
		;

	/* Settle...? */
	sleep_100ns(10000);
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
	/* Establish data contents in DDR RAM for training */

	__raw_writel(0xcacafefe, ((void __iomem *)MSCC_DDR_TO));
	__raw_writel(0x22221111, ((void __iomem *)MSCC_DDR_TO + 0x4));
	__raw_writel(0x44443333, ((void __iomem *)MSCC_DDR_TO + 0x8));
	__raw_writel(0x66665555, ((void __iomem *)MSCC_DDR_TO + 0xC));
	__raw_writel(0x88887777, ((void __iomem *)MSCC_DDR_TO + 0x10));
	__raw_writel(0xaaaa9999, ((void __iomem *)MSCC_DDR_TO + 0x14));
	__raw_writel(0xccccbbbb, ((void __iomem *)MSCC_DDR_TO + 0x18));
	__raw_writel(0xeeeedddd, ((void __iomem *)MSCC_DDR_TO + 0x1C));
#else
	__raw_writel(0xff, ((void __iomem *)MSCC_DDR_TO));
#endif
}
#endif /* __ASM_MACH_DDR_H */