/* SPDX-License-Identifier: (GPL-2.0+ OR MIT) */
/*
 * Copyright (c) 2018 Microsemi Corporation
 */

#ifndef __ASM_MACH_DDR_H
#define __ASM_MACH_DDR_H

#include <config.h>
#include <asm/cacheops.h>
#include <asm/io.h>
#include <asm/reboot.h>
#include <linux/bitops.h>
#include <mach/common.h>

#define MIPS_VCOREIII_MEMORY_DDR3
#define MIPS_VCOREIII_DDR_SIZE CFG_SYS_SDRAM_SIZE

#if defined(CONFIG_DDRTYPE_H5TQ1G63BFA) /* Serval1 Refboard */

/* Hynix H5TQ1G63BFA (1Gbit DDR3, x16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 13
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 35
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 38
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41J128M16HA) /* Validation board */

/* Micron MT41J128M16HA-15E:D (2Gbit DDR3, x16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 5
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 50
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 54
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41K256M16) /* JR2 Validation board */

/* Micron MT41K256M16 (4Gbit, DDR3L-800, 256Mbitx16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 15
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 5
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_H5TQ4G63MFR) /* JR2 Reference board */

/* Hynix H5TQ4G63MFR-PBC (4Gbit, DDR3-800, 256Mbitx16) - 2kb pages @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 15
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 17
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41K128M16JT)

/* Micron MT41K128M16JT-125 (2Gbit DDR3L, 128Mbitx16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT47H128M8HQ) /* Luton10/26 Refboards */

/* Micron 1Gb MT47H128M8-3 16Meg x 8 x 8 banks, DDR-533@CL4 @ 4.80ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 1625
#define VC3_MPAR_tRAS_min 9
#define VC3_MPAR_CL 4
#define VC3_MPAR_tWTR 2
#define VC3_MPAR_tRC 12
#define VC3_MPAR_tFAW 8
#define VC3_MPAR_tRP 4
#define VC3_MPAR_tRRD 2
#define VC3_MPAR_tRCD 4

#define VC3_MPAR_tRPA 4
#define VC3_MPAR_tRP 4

#define VC3_MPAR_tMRD 2
#define VC3_MPAR_tRFC 27

#define VC3_MPAR__400_ns_dly 84

#define VC3_MPAR_tWR 4
#undef MIPS_VCOREIII_MEMORY_DDR3
#else

#error Unknown DDR system configuration - please add!

#endif
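
/*
 * Note when adding a new device above: each block must define the full
 * VC3_MPAR_* set used further down.  The DDR3 parts also provide CWL,
 * tXPR, tMOD and tDLLK, while a DDR2 part must #undef
 * MIPS_VCOREIII_MEMORY_DDR3 and additionally define VC3_MPAR_tRPA and
 * VC3_MPAR__400_ns_dly, as the MT47H128M8HQ block does.
 */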

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
#define MIPS_VCOREIII_MEMORY_16BIT 1
#endif

#define MIPS_VCOREIII_MEMORY_SSTL_ODT 7
#define MIPS_VCOREIII_MEMORY_SSTL_DRIVE 7
#define VCOREIII_DDR_DQS_MODE_CALIBRATE

#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_16BIT 1
#else
#define VC3_MPAR_16BIT 0
#endif

#ifdef MIPS_VCOREIII_MEMORY_DDR3
#define VC3_MPAR_DDR3_MODE 1 /* DDR3 */
#define VC3_MPAR_BURST_LENGTH 8 /* Always 8 (1) for DDR3 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_BURST_SIZE 1 /* Always 1 for DDR3/16bit */
#else
#define VC3_MPAR_BURST_SIZE 0
#endif
#else
#define VC3_MPAR_DDR3_MODE 0 /* DDR2 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_BURST_LENGTH 4 /* in DDR2 16-bit mode, use burstlen 4 */
#else
#define VC3_MPAR_BURST_LENGTH 8 /* For 8-bit IF we must run burst-8 */
#endif
#define VC3_MPAR_BURST_SIZE 0 /* Always 0 for DDR2 */
#endif

#define VC3_MPAR_RL VC3_MPAR_CL
#if !defined(MIPS_VCOREIII_MEMORY_DDR3)
#define VC3_MPAR_WL (VC3_MPAR_RL - 1)
#define VC3_MPAR_MD VC3_MPAR_tMRD
#define VC3_MPAR_ID VC3_MPAR__400_ns_dly
#define VC3_MPAR_SD VC3_MPAR_tXSRD
#define VC3_MPAR_OW (VC3_MPAR_WL - 2)
#define VC3_MPAR_OR (VC3_MPAR_WL - 3)
#define VC3_MPAR_RP (VC3_MPAR_bank_addr_cnt < 3 ? VC3_MPAR_tRP : VC3_MPAR_tRPA)
#define VC3_MPAR_FAW (VC3_MPAR_bank_addr_cnt < 3 ? 1 : VC3_MPAR_tFAW)
#define VC3_MPAR_BL (VC3_MPAR_BURST_LENGTH == 4 ? 2 : 4)
#define MSCC_MEMPARM_MR0 \
	(VC3_MPAR_BURST_LENGTH == 8 ? 3 : 2) | (VC3_MPAR_CL << 4) | \
	((VC3_MPAR_tWR - 1) << 9)
/* DLL-on, Full-OD, AL=0, RTT=off, nDQS-on, RDQS-off, out-en */
#define MSCC_MEMPARM_MR1 0x382
#define MSCC_MEMPARM_MR2 0
#define MSCC_MEMPARM_MR3 0
#else
#define VC3_MPAR_WL VC3_MPAR_CWL
#define VC3_MPAR_MD VC3_MPAR_tMOD
#define VC3_MPAR_ID VC3_MPAR_tXPR
#define VC3_MPAR_SD VC3_MPAR_tDLLK
#define VC3_MPAR_OW 2
#define VC3_MPAR_OR 2
#define VC3_MPAR_RP VC3_MPAR_tRP
#define VC3_MPAR_FAW VC3_MPAR_tFAW
#define VC3_MPAR_BL 4
#define MSCC_MEMPARM_MR0 ((VC3_MPAR_RL - 4) << 4) | ((VC3_MPAR_tWR - 4) << 9)
/* ODT_RTT: "0x0040" for 120ohm, and "0x0004" for 60ohm. */
#define MSCC_MEMPARM_MR1 0x0040
#define MSCC_MEMPARM_MR2 ((VC3_MPAR_WL - 5) << 3)
#define MSCC_MEMPARM_MR3 0
#endif /* MIPS_VCOREIII_MEMORY_DDR3 */
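
/*
 * Worked example for the DDR3 branch above, assuming the H5TQ4G63MFR
 * timings: RL = CL = 6 and tWR = 5, so MSCC_MEMPARM_MR0 evaluates to
 * ((6 - 4) << 4) | ((5 - 4) << 9) = 0x220, which matches the JEDEC DDR3
 * MR0 encoding of the CAS-latency and write-recovery fields for the tWR
 * values used here.
 */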

#define MSCC_MEMPARM_MEMCFG \
	((MIPS_VCOREIII_DDR_SIZE > SZ_512M) ? \
	 ICPU_MEMCTRL_CFG_DDR_512MBYTE_PLUS : 0) | \
	(VC3_MPAR_16BIT ? ICPU_MEMCTRL_CFG_DDR_WIDTH : 0) | \
	(VC3_MPAR_DDR3_MODE ? ICPU_MEMCTRL_CFG_DDR_MODE : 0) | \
	(VC3_MPAR_BURST_SIZE ? ICPU_MEMCTRL_CFG_BURST_SIZE : 0) | \
	(VC3_MPAR_BURST_LENGTH == 8 ? ICPU_MEMCTRL_CFG_BURST_LEN : 0) | \
	(VC3_MPAR_bank_addr_cnt == 3 ? ICPU_MEMCTRL_CFG_BANK_CNT : 0) | \
	ICPU_MEMCTRL_CFG_MSB_ROW_ADDR(VC3_MPAR_row_addr_cnt - 1) | \
	ICPU_MEMCTRL_CFG_MSB_COL_ADDR(VC3_MPAR_col_addr_cnt - 1)
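
/*
 * The MSB_ROW_ADDR/MSB_COL_ADDR fields appear to take the index of the
 * most significant address bit, hence the "- 1" on the counts above;
 * the remaining fields are single-bit flags selected by the ternaries.
 */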

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
#define MSCC_MEMPARM_PERIOD \
	ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(8) | \
	ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0 \
	ICPU_MEMCTRL_TIMING0_RD_TO_WR_DLY(VC3_MPAR_RL + VC3_MPAR_BL + 1 - \
					  VC3_MPAR_WL) | \
	ICPU_MEMCTRL_TIMING0_WR_CS_CHANGE_DLY(VC3_MPAR_BL - 1) | \
	ICPU_MEMCTRL_TIMING0_RD_CS_CHANGE_DLY(VC3_MPAR_BL) | \
	ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) | \
	ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_WL + \
					     VC3_MPAR_BL + \
					     VC3_MPAR_tWR - 1) | \
	ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BL - 1) | \
	ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_WL - 1) | \
	ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_RL - 3)

#define MSCC_MEMPARM_TIMING1 \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
	ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_FAW - 1) | \
	ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_RP - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) | \
	ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_WL + \
					  VC3_MPAR_BL + \
					  VC3_MPAR_tWTR - 1)

#define MSCC_MEMPARM_TIMING2 \
	ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_RP - 1) | \
	ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_MD - 1) | \
	ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) | \
	ICPU_MEMCTRL_TIMING2_INIT_DLY(VC3_MPAR_ID - 1)

#define MSCC_MEMPARM_TIMING3 \
	ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_WL + \
						    VC3_MPAR_tWTR - 1) |\
	ICPU_MEMCTRL_TIMING3_ODT_RD_DLY(VC3_MPAR_OR - 1) | \
	ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_OW - 1) | \
	ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_RL - 3)

#else
#define MSCC_MEMPARM_PERIOD \
	ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(1) | \
	ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0 \
	ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) | \
	ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_CL + \
					     (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
					     VC3_MPAR_tWR) | \
	ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BURST_LENGTH == 8 ? 3 : 1) | \
	ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_CL - 3) | \
	ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_CL - 3)

#define MSCC_MEMPARM_TIMING1 \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
	ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_tFAW - 1) | \
	ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_tRP - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) | \
	ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) | \
	ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_CL + \
					  (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
					  VC3_MPAR_tWTR)
#define MSCC_MEMPARM_TIMING2 \
	ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_tRPA - 1) | \
	ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_tMRD - 1) | \
	ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) | \
	ICPU_MEMCTRL_TIMING2_FOUR_HUNDRED_NS_DLY(VC3_MPAR__400_ns_dly)

#define MSCC_MEMPARM_TIMING3 \
	ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_CL - 1) | \
	ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_CL - 1) | \
	ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_CL - 1)

#endif

enum {
	DDR_TRAIN_OK,
	DDR_TRAIN_CONTINUE,
	DDR_TRAIN_ERROR,
};

/*
 * We actually have very few 'pause' possibilities apart from
 * these assembly nops (at this very early stage).
 */
#define PAUSE() asm volatile("nop; nop; nop; nop; nop; nop; nop; nop")
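/*
 * Eight NOPs give only a rough, core-clock-dependent delay, so PAUSE()
 * is used where a short settling time is wanted but exact timing does
 * not matter; where the timer block is available, sleep_100ns() below
 * provides a calibrated delay instead.
 */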

/* NB: Assumes inlining as no stack is available! */
static inline void set_dly(u32 bytelane, u32 dly)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));

	r &= ~ICPU_MEMCTRL_DQS_DLY_DQS_DLY_M;
	r |= ICPU_MEMCTRL_DQS_DLY_DQS_DLY(dly);
	writel(r, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
}

static inline bool incr_dly(u32 bytelane)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));

	if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
		writel(r + 1, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
		return true;
	}

	return false;
}

static inline bool adjust_dly(int adjust)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));

	if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
		writel(r + adjust, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));
		return true;
	}

	return false;
}

/* NB: Assumes inlining as no stack is available! */
static inline void center_dly(u32 bytelane, u32 start)
{
	register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane)) - start;

	writel(start + (r >> 1), BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
}

static inline void memphy_soft_reset(void)
{
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
	PAUSE();
	clrbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
	PAUSE();
}

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
static u8 training_data[] = { 0xfe, 0x11, 0x33, 0x55, 0x77, 0x99, 0xbb, 0xdd };

static inline void sleep_100ns(u32 val)
{
	/* Set the timer tick generator to 100 ns */
	writel(VCOREIII_TIMER_DIVIDER - 1, BASE_CFG + ICPU_TIMER_TICK_DIV);

	/* Set the timer value */
	writel(val, BASE_CFG + ICPU_TIMER_VALUE(0));

	/* Enable timer 0 for one-shot */
	writel(ICPU_TIMER_CTRL_ONE_SHOT_ENA | ICPU_TIMER_CTRL_TIMER_ENA,
	       BASE_CFG + ICPU_TIMER_CTRL(0));

	/* Wait for timer 0 to reach 0 */
	while (readl(BASE_CFG + ICPU_TIMER_VALUE(0)) != 0)
		;
}
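/*
 * The delay is val ticks of 100 ns each, so the sleep_100ns(10000)
 * calls below wait roughly 1 ms.
 */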

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_SERVAL)
/*
 * DDR memory sanity checking failed, tally and do hard reset
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_failed(void)
{
	register u32 reset;

#if defined(CONFIG_SOC_OCELOT)
	writel(readl(BASE_CFG + ICPU_GPR(6)) + 1, BASE_CFG + ICPU_GPR(6));

	clrbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
#endif

	/* We have to execute the reset function from cache. Indeed,
	 * the reboot workaround in _machine_restart() will change the
	 * SPI NOR into SW bitbang.
	 *
	 * This will render the CPU unable to execute directly from
	 * the NOR, which is why the reset instructions are prefetched
	 * into the I-cache.
	 *
	 * When failing the DDR initialization we are executing from
	 * NOR.
	 *
	 * The last instruction in _machine_restart() will reset the
	 * MIPS CPU (and the cache), and the CPU will start executing
	 * from the reset vector.
	 */
	reset = KSEG0ADDR(_machine_restart);
	icache_lock((void *)reset, 128);
	asm volatile ("jr %0"::"r" (reset));
}
#else /* JR2 || ServalT */
static inline void hal_vcoreiii_ddr_failed(void)
{
	writel(0, BASE_CFG + ICPU_RESET);
	writel(PERF_SOFT_RST_SOFT_CHIP_RST, BASE_CFG + PERF_SOFT_RST);
}
#endif

#if defined(CONFIG_SOC_OCELOT)
static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	/* DDR reset pin is on GPIO 19; toggle it low, then high, to release */
	setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
	writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_CLR);
	sleep_100ns(10000);
}

static inline void hal_vcoreiii_ddr_reset_release(void)
{
	/* DDR reset pin is on GPIO 19; toggle it low, then high, to release */
	setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
	writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_SET);
	sleep_100ns(10000);
}

#else /* JR2 || ServalT || Serval */
static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	/* Ensure the memory controller physical iface is forced reset */
	writel(readl(BASE_CFG + ICPU_MEMPHY_CFG) |
	       ICPU_MEMPHY_CFG_PHY_RST, BASE_CFG + ICPU_MEMPHY_CFG);

	/* Ensure the memory controller is forced reset */
	writel(readl(BASE_CFG + ICPU_RESET) |
	       ICPU_RESET_MEM_RST_FORCE, BASE_CFG + ICPU_RESET);
}
#endif /* JR2 || ServalT || Serval */

/*
 * DDR memory sanity checking done, possibly enable ECC.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_verified(void)
{
#ifdef MIPS_VCOREIII_MEMORY_ECC
	/* Finally, enable ECC */
	register u32 val = readl(BASE_CFG + ICPU_MEMCTRL_CFG);

	val |= ICPU_MEMCTRL_CFG_DDR_ECC_ERR_ENA;
	val &= ~ICPU_MEMCTRL_CFG_BURST_SIZE;

	writel(val, BASE_CFG + ICPU_MEMCTRL_CFG);
#endif

	/* Reset Status register - sticky bits */
	writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT), BASE_CFG + ICPU_MEMCTRL_STAT);
}

/* NB: Assumes inlining as no stack is available! */
static inline int look_for(u32 bytelane)
{
	register u32 i;

	/* Reset FIFO in case any previous access failed */
	for (i = 0; i < sizeof(training_data); i++) {
		register u32 byte;

		memphy_soft_reset();
		/* Reset sticky bits */
		writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
		       BASE_CFG + ICPU_MEMCTRL_STAT);
		/* Read data */
		byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
				   (i * 4));

		/*
		 * Prevent the compiler from reordering the accesses, so
		 * that the read of RAM is not moved past the check of
		 * the error bits below.
		 */
		rmb();
		if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
		    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
		     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
			/* Noise on the line */
			goto read_error;
		}
		/* If mismatch, increment DQS - if possible */
		if (byte != training_data[i]) {
read_error:
			if (!incr_dly(bytelane))
				return DDR_TRAIN_ERROR;
			return DDR_TRAIN_CONTINUE;
		}
	}
	return DDR_TRAIN_OK;
}

/* NB: Assumes inlining as no stack is available! */
static inline int look_past(u32 bytelane)
{
	register u32 i;

	/* Reset FIFO in case any previous access failed */
	for (i = 0; i < sizeof(training_data); i++) {
		register u32 byte;

		memphy_soft_reset();
		/* Ack sticky bits */
		writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
		       BASE_CFG + ICPU_MEMCTRL_STAT);
		byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
				   (i * 4));
		/*
		 * Prevent the compiler from reordering the accesses, so
		 * that the read of RAM is not moved past the check of
		 * the error bits below.
		 */
		rmb();
		if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
		    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
		     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
			/* Noise on the line */
			goto read_error;
		}
		/* Bail out when we see first mismatch */
		if (byte != training_data[i]) {
read_error:
			return DDR_TRAIN_OK;
		}
	}
	/* All data compares OK, increase DQS and retry */
	if (!incr_dly(bytelane))
		return DDR_TRAIN_ERROR;

	return DDR_TRAIN_CONTINUE;
}

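/*
 * Byte lane training: look_for() steps the DQS delay from 0 until the
 * training pattern reads back correctly (start of the data eye), then
 * look_past() keeps stepping until the first failure (end of the eye),
 * and center_dly() finally places the delay in the middle of that
 * window.
 */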
static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
{
	register int res;
	register u32 dqs_s;

	set_dly(bytelane, 0); /* Start training at DQS=0 */
	while ((res = look_for(bytelane)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;

	dqs_s = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
	while ((res = look_past(bytelane)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;
	/* Reset FIFO - for good measure */
	memphy_soft_reset();
	/* Adjust to center [dqs_s;cur] */
	center_dly(bytelane, dqs_s);
	return DDR_TRAIN_OK;
}

/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_init_dqs(void)
{
#define MAX_DQS 32
	register u32 i, j;

	for (i = 0; i < MAX_DQS; i++) {
		set_dly(0, i); /* Byte-lane 0 */
		for (j = 0; j < MAX_DQS; j++) {
			__maybe_unused register u32 byte;

			set_dly(1, j); /* Byte-lane 1 */
			/* Reset FIFO in case any previous access failed */
			memphy_soft_reset();
			writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
			       BASE_CFG + ICPU_MEMCTRL_STAT);
			byte = __raw_readb((void __iomem *)MSCC_DDR_TO);
			byte = __raw_readb((void __iomem *)(MSCC_DDR_TO + 1));
			if (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
			      (ICPU_MEMCTRL_STAT_RDATA_MASKED |
			       ICPU_MEMCTRL_STAT_RDATA_DUMMY)))
				return 0;
		}
	}
	return -1;
}

static inline int dram_check(void)
{
	register u32 i;

	for (i = 0; i < 8; i++) {
		__raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));
		if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
			return 1;
	}
	return 0;
}
#else /* Luton */

static inline void sleep_100ns(u32 val)
{
}

static inline void hal_vcoreiii_ddr_reset_assert(void)
{
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_RST);
	setbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);
}

static inline void hal_vcoreiii_ddr_reset_release(void)
{
}

static inline void hal_vcoreiii_ddr_failed(void)
{
	register u32 memphy_cfg = readl(BASE_CFG + ICPU_MEMPHY_CFG);

	/* Do a fifo reset and start over */
	writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
	writel(memphy_cfg & ~ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
	writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
	       BASE_CFG + ICPU_MEMPHY_CFG);
}

static inline void hal_vcoreiii_ddr_verified(void)
{
}

static inline int look_for(u32 data)
{
	register u32 byte = __raw_readb((void __iomem *)MSCC_DDR_TO);

	if (data != byte) {
		if (!incr_dly(0))
			return DDR_TRAIN_ERROR;
		return DDR_TRAIN_CONTINUE;
	}

	return DDR_TRAIN_OK;
}

/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
{
	register int res;

	set_dly(bytelane, 0); /* Start training at DQS=0 */
	while ((res = look_for(0xff)) == DDR_TRAIN_CONTINUE)
		;
	if (res != DDR_TRAIN_OK)
		return res;

	set_dly(bytelane, 0); /* Start training at DQS=0 */
	while ((res = look_for(0x00)) == DDR_TRAIN_CONTINUE)
		;

	if (res != DDR_TRAIN_OK)
		return res;

	adjust_dly(-3);

	return DDR_TRAIN_OK;
}

static inline int hal_vcoreiii_init_dqs(void)
{
	return 0;
}

static inline int dram_check(void)
{
	register u32 i;

	for (i = 0; i < 8; i++) {
		__raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));

		if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
			return 1;
	}

	return 0;
}
#endif

/*
 * NB: Called *early* to init memory controller - assumes inlining as
 * no stack is available!
 */
static inline void hal_vcoreiii_init_memctl(void)
{
	/* Ensure DDR is in reset */
	hal_vcoreiii_ddr_reset_assert();

	/* The wait may not be needed, but ... */
	PAUSE();

	/* Drop sys ctl memory controller forced reset */
	clrbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);

	PAUSE();

	/* Drop Reset, enable SSTL */
	writel(ICPU_MEMPHY_CFG_PHY_SSTL_ENA, BASE_CFG + ICPU_MEMPHY_CFG);
	PAUSE();

	/* Start the automatic SSTL output and ODT drive-strength calibration */
	writel(ICPU_MEMPHY_ZCAL_ZCAL_PROG_ODT(MIPS_VCOREIII_MEMORY_SSTL_ODT) |
	       /* drive strength */
	       ICPU_MEMPHY_ZCAL_ZCAL_PROG(MIPS_VCOREIII_MEMORY_SSTL_DRIVE) |
	       /* Start calibration process */
	       ICPU_MEMPHY_ZCAL_ZCAL_ENA, BASE_CFG + ICPU_MEMPHY_ZCAL);

	/* Wait for ZCAL to clear */
	while (readl(BASE_CFG + ICPU_MEMPHY_ZCAL) & ICPU_MEMPHY_ZCAL_ZCAL_ENA)
		;
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT)
	/* Check no ZCAL_ERR */
	if (readl(BASE_CFG + ICPU_MEMPHY_ZCAL_STAT)
	    & ICPU_MEMPHY_ZCAL_STAT_ZCAL_ERR)
		hal_vcoreiii_ddr_failed();
#endif
	/* Drive CL, CK, ODT */
	setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_ODT_OE |
		     ICPU_MEMPHY_CFG_PHY_CK_OE | ICPU_MEMPHY_CFG_PHY_CL_OE);

	/* Initialize memory controller */
	writel(MSCC_MEMPARM_MEMCFG, BASE_CFG + ICPU_MEMCTRL_CFG);
	writel(MSCC_MEMPARM_PERIOD, BASE_CFG + ICPU_MEMCTRL_REF_PERIOD);

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
	writel(MSCC_MEMPARM_TIMING0, BASE_CFG + ICPU_MEMCTRL_TIMING0);
#else /* Luton */
	clrbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, ((1 << 20) - 1));
	setbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, MSCC_MEMPARM_TIMING0);
#endif

	writel(MSCC_MEMPARM_TIMING1, BASE_CFG + ICPU_MEMCTRL_TIMING1);
	writel(MSCC_MEMPARM_TIMING2, BASE_CFG + ICPU_MEMCTRL_TIMING2);
	writel(MSCC_MEMPARM_TIMING3, BASE_CFG + ICPU_MEMCTRL_TIMING3);
	writel(MSCC_MEMPARM_MR0, BASE_CFG + ICPU_MEMCTRL_MR0_VAL);
	writel(MSCC_MEMPARM_MR1, BASE_CFG + ICPU_MEMCTRL_MR1_VAL);
	writel(MSCC_MEMPARM_MR2, BASE_CFG + ICPU_MEMCTRL_MR2_VAL);
	writel(MSCC_MEMPARM_MR3, BASE_CFG + ICPU_MEMCTRL_MR3_VAL);

#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_SERVAL)
	/* Termination setup - enable ODT */
	writel(ICPU_MEMCTRL_TERMRES_CTRL_LOCAL_ODT_RD_ENA |
	       /* Assert ODT0 for any write */
	       ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
	       BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

	/* Release Reset from DDR */
#if defined(CONFIG_SOC_OCELOT)
	hal_vcoreiii_ddr_reset_release();
#endif

	writel(readl(BASE_CFG + ICPU_GPR(7)) + 1, BASE_CFG + ICPU_GPR(7));
#elif defined(CONFIG_SOC_JR2) || defined(CONFIG_SOC_SERVALT)
	writel(ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
	       BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);
#else /* Luton */
	/* Termination setup - disable ODT */
	writel(0, BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

#endif
}

static inline void hal_vcoreiii_wait_memctl(void)
{
	/* Now, rip it! */
	writel(ICPU_MEMCTRL_CTRL_INITIALIZE, BASE_CFG + ICPU_MEMCTRL_CTRL);

	while (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT)
		 & ICPU_MEMCTRL_STAT_INIT_DONE))
		;

	/* Settle...? */
	sleep_100ns(10000);
#if defined(CONFIG_SOC_OCELOT) || defined(CONFIG_SOC_JR2) || \
	defined(CONFIG_SOC_SERVALT) || defined(CONFIG_SOC_SERVAL)
	/* Establish data contents in DDR RAM for training */

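	/*
	 * The 32-bit patterns below are chosen so that, on a
	 * little-endian layout, byte lanes 0 and 1 of each word read
	 * back as the corresponding entry of training_data[]
	 * (0xfe, 0x11, 0x33, ...) used by look_for() and look_past()
	 * above.
	 */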
	__raw_writel(0xcacafefe, ((void __iomem *)MSCC_DDR_TO));
	__raw_writel(0x22221111, ((void __iomem *)MSCC_DDR_TO + 0x4));
	__raw_writel(0x44443333, ((void __iomem *)MSCC_DDR_TO + 0x8));
	__raw_writel(0x66665555, ((void __iomem *)MSCC_DDR_TO + 0xC));
	__raw_writel(0x88887777, ((void __iomem *)MSCC_DDR_TO + 0x10));
	__raw_writel(0xaaaa9999, ((void __iomem *)MSCC_DDR_TO + 0x14));
	__raw_writel(0xccccbbbb, ((void __iomem *)MSCC_DDR_TO + 0x18));
	__raw_writel(0xeeeedddd, ((void __iomem *)MSCC_DDR_TO + 0x1C));
#else
	__raw_writel(0xff, ((void __iomem *)MSCC_DDR_TO));
#endif
}
#endif /* __ASM_MACH_DDR_H */