/* SPDX-License-Identifier: (GPL-2.0+ OR MIT) */
/*
 * Copyright (c) 2018 Microsemi Corporation
 */

#ifndef __ASM_MACH_DDR_H
#define __ASM_MACH_DDR_H

#include <asm/cacheops.h>
#include <asm/io.h>
#include <asm/reboot.h>
#include <mach/common.h>

#define MIPS_VCOREIII_MEMORY_DDR3
#define MIPS_VCOREIII_DDR_SIZE CONFIG_SYS_SDRAM_SIZE

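/*
 * Note (added for clarity): the VC3_MPAR_* values below are DDR timing
 * parameters expressed in memory clock cycles at the clock period noted
 * in each device comment, e.g. tREFI 2437 * 3.2 ns is roughly 7.8 us,
 * the standard DDR3 refresh interval.
 */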
#if defined(CONFIG_DDRTYPE_H5TQ1G63BFA) /* Serval1 Refboard */

/* Hynix H5TQ1G63BFA (1Gbit DDR3, x16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 13
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 35
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 38
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41J128M16HA) /* Validation board */

/* Micron MT41J128M16HA-15E:D (2Gbit DDR3, x16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 5
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 50
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 54
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41K256M16) /* JR2 Validation board */

/* Micron MT41K256M16 (4Gbit, DDR3L-800, 256Mbitx16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 15
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 5
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_H5TQ4G63MFR) /* JR2 Reference board */

/* Hynix H5TQ4G63MFR-PBC (4Gbit, DDR3-800, 256Mbitx16) - 2KB pages @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 15
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 17
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT41K128M16JT)

/* Micron MT41K128M16JT-125 (2Gbit DDR3L, 128Mbitx16) @ 3.20ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 2437
#define VC3_MPAR_tRAS_min 12
#define VC3_MPAR_CL 6
#define VC3_MPAR_tWTR 4
#define VC3_MPAR_tRC 16
#define VC3_MPAR_tFAW 16
#define VC3_MPAR_tRP 5
#define VC3_MPAR_tRRD 4
#define VC3_MPAR_tRCD 5
#define VC3_MPAR_tMRD 4
#define VC3_MPAR_tRFC 82
#define VC3_MPAR_CWL 5
#define VC3_MPAR_tXPR 85
#define VC3_MPAR_tMOD 12
#define VC3_MPAR_tDLLK 512
#define VC3_MPAR_tWR 5

#elif defined(CONFIG_DDRTYPE_MT47H128M8HQ) /* Luton10/26 Refboards */

/* Micron 1Gb MT47H128M8-3 16Meg x 8 x 8 banks, DDR-533@CL4 @ 4.80ns */
#define VC3_MPAR_bank_addr_cnt 3
#define VC3_MPAR_row_addr_cnt 14
#define VC3_MPAR_col_addr_cnt 10
#define VC3_MPAR_tREFI 1625
#define VC3_MPAR_tRAS_min 9
#define VC3_MPAR_CL 4
#define VC3_MPAR_tWTR 2
#define VC3_MPAR_tRC 12
#define VC3_MPAR_tFAW 8
#define VC3_MPAR_tRP 4
#define VC3_MPAR_tRRD 2
#define VC3_MPAR_tRCD 4

#define VC3_MPAR_tRPA 4
#define VC3_MPAR_tRP 4

#define VC3_MPAR_tMRD 2
#define VC3_MPAR_tRFC 27

#define VC3_MPAR__400_ns_dly 84

#define VC3_MPAR_tWR 4
#undef MIPS_VCOREIII_MEMORY_DDR3
#else

#error Unknown DDR system configuration - please add!

#endif

#ifdef CONFIG_SOC_OCELOT
#define MIPS_VCOREIII_MEMORY_16BIT 1
#endif

#define MIPS_VCOREIII_MEMORY_SSTL_ODT 7
#define MIPS_VCOREIII_MEMORY_SSTL_DRIVE 7
#define VCOREIII_DDR_DQS_MODE_CALIBRATE

#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_16BIT 1
#else
#define VC3_MPAR_16BIT 0
#endif

#ifdef MIPS_VCOREIII_MEMORY_DDR3
#define VC3_MPAR_DDR3_MODE 1 /* DDR3 */
#define VC3_MPAR_BURST_LENGTH 8 /* Always 8 (1) for DDR3 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_BURST_SIZE 1 /* Always 1 for DDR3/16bit */
#else
#define VC3_MPAR_BURST_SIZE 0
#endif
#else
#define VC3_MPAR_DDR3_MODE 0 /* DDR2 */
#ifdef MIPS_VCOREIII_MEMORY_16BIT
#define VC3_MPAR_BURST_LENGTH 4 /* in DDR2 16-bit mode, use burstlen 4 */
#else
#define VC3_MPAR_BURST_LENGTH 8 /* For 8-bit IF we must run burst-8 */
#endif
#define VC3_MPAR_BURST_SIZE 0 /* Always 0 for DDR2 */
#endif

#define VC3_MPAR_RL VC3_MPAR_CL
#if !defined(MIPS_VCOREIII_MEMORY_DDR3)
#define VC3_MPAR_WL (VC3_MPAR_RL - 1)
#define VC3_MPAR_MD VC3_MPAR_tMRD
#define VC3_MPAR_ID VC3_MPAR__400_ns_dly
#define VC3_MPAR_SD VC3_MPAR_tXSRD
#define VC3_MPAR_OW (VC3_MPAR_WL - 2)
#define VC3_MPAR_OR (VC3_MPAR_WL - 3)
#define VC3_MPAR_RP (VC3_MPAR_bank_addr_cnt < 3 ? VC3_MPAR_tRP : VC3_MPAR_tRPA)
#define VC3_MPAR_FAW (VC3_MPAR_bank_addr_cnt < 3 ? 1 : VC3_MPAR_tFAW)
#define VC3_MPAR_BL (VC3_MPAR_BURST_LENGTH == 4 ? 2 : 4)
#define MSCC_MEMPARM_MR0 \
        (VC3_MPAR_BURST_LENGTH == 8 ? 3 : 2) | (VC3_MPAR_CL << 4) | \
        ((VC3_MPAR_tWR - 1) << 9)
/* DLL-on, Full-OD, AL=0, RTT=off, nDQS-on, RDQS-off, out-en */
#define MSCC_MEMPARM_MR1 0x382
#define MSCC_MEMPARM_MR2 0
#define MSCC_MEMPARM_MR3 0
#else
#define VC3_MPAR_WL VC3_MPAR_CWL
#define VC3_MPAR_MD VC3_MPAR_tMOD
#define VC3_MPAR_ID VC3_MPAR_tXPR
#define VC3_MPAR_SD VC3_MPAR_tDLLK
#define VC3_MPAR_OW 2
#define VC3_MPAR_OR 2
#define VC3_MPAR_RP VC3_MPAR_tRP
#define VC3_MPAR_FAW VC3_MPAR_tFAW
#define VC3_MPAR_BL 4
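/*
 * Note (added for clarity): MR0 below follows the JEDEC DDR3 MR0 field
 * layout: (RL - 4) << 4 puts the CAS latency in A[6:4] (valid for
 * CL 5..11 with A2 = 0), and (tWR - 4) << 9 sets the write recovery
 * field in A[11:9] (a linear mapping only for WR 5..8); A[1:0] = 0
 * selects fixed BL8.
 */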
#define MSCC_MEMPARM_MR0 ((VC3_MPAR_RL - 4) << 4) | ((VC3_MPAR_tWR - 4) << 9)
/* ODT_RTT: "0x0040" for 120ohm, and "0x0004" for 60ohm. */
#define MSCC_MEMPARM_MR1 0x0040
#define MSCC_MEMPARM_MR2 ((VC3_MPAR_WL - 5) << 3)
#define MSCC_MEMPARM_MR3 0
#endif /* MIPS_VCOREIII_MEMORY_DDR3 */

#define MSCC_MEMPARM_MEMCFG \
        ((MIPS_VCOREIII_DDR_SIZE > SZ_512M) ? \
         ICPU_MEMCTRL_CFG_DDR_512MBYTE_PLUS : 0) | \
        (VC3_MPAR_16BIT ? ICPU_MEMCTRL_CFG_DDR_WIDTH : 0) | \
        (VC3_MPAR_DDR3_MODE ? ICPU_MEMCTRL_CFG_DDR_MODE : 0) | \
        (VC3_MPAR_BURST_SIZE ? ICPU_MEMCTRL_CFG_BURST_SIZE : 0) | \
        (VC3_MPAR_BURST_LENGTH == 8 ? ICPU_MEMCTRL_CFG_BURST_LEN : 0) | \
        (VC3_MPAR_bank_addr_cnt == 3 ? ICPU_MEMCTRL_CFG_BANK_CNT : 0) | \
        ICPU_MEMCTRL_CFG_MSB_ROW_ADDR(VC3_MPAR_row_addr_cnt - 1) | \
        ICPU_MEMCTRL_CFG_MSB_COL_ADDR(VC3_MPAR_col_addr_cnt - 1)

#ifdef CONFIG_SOC_OCELOT
#define MSCC_MEMPARM_PERIOD \
        ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(8) | \
        ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0 \
        ICPU_MEMCTRL_TIMING0_RD_TO_WR_DLY(VC3_MPAR_RL + VC3_MPAR_BL + 1 - \
                                          VC3_MPAR_WL) | \
        ICPU_MEMCTRL_TIMING0_WR_CS_CHANGE_DLY(VC3_MPAR_BL - 1) | \
        ICPU_MEMCTRL_TIMING0_RD_CS_CHANGE_DLY(VC3_MPAR_BL) | \
        ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) | \
        ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_WL + \
                                             VC3_MPAR_BL + \
                                             VC3_MPAR_tWR - 1) | \
        ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BL - 1) | \
        ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_WL - 1) | \
        ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_RL - 3)

#define MSCC_MEMPARM_TIMING1 \
        ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
        ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_FAW - 1) | \
        ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_RP - 1) | \
        ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) | \
        ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) | \
        ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_WL + \
                                          VC3_MPAR_BL + \
                                          VC3_MPAR_tWTR - 1)

#define MSCC_MEMPARM_TIMING2 \
        ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_RP - 1) | \
        ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_MD - 1) | \
        ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) | \
        ICPU_MEMCTRL_TIMING2_INIT_DLY(VC3_MPAR_ID - 1)

#define MSCC_MEMPARM_TIMING3 \
        ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_WL + \
                                                    VC3_MPAR_tWTR - 1) | \
        ICPU_MEMCTRL_TIMING3_ODT_RD_DLY(VC3_MPAR_OR - 1) | \
        ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_OW - 1) | \
        ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_RL - 3)

#else
#define MSCC_MEMPARM_PERIOD \
        ICPU_MEMCTRL_REF_PERIOD_MAX_PEND_REF(1) | \
        ICPU_MEMCTRL_REF_PERIOD_REF_PERIOD(VC3_MPAR_tREFI)

#define MSCC_MEMPARM_TIMING0 \
        ICPU_MEMCTRL_TIMING0_RAS_TO_PRECH_DLY(VC3_MPAR_tRAS_min - 1) | \
        ICPU_MEMCTRL_TIMING0_WR_TO_PRECH_DLY(VC3_MPAR_CL + \
                                             (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
                                             VC3_MPAR_tWR) | \
        ICPU_MEMCTRL_TIMING0_RD_TO_PRECH_DLY(VC3_MPAR_BURST_LENGTH == 8 ? 3 : 1) | \
        ICPU_MEMCTRL_TIMING0_WR_DATA_XFR_DLY(VC3_MPAR_CL - 3) | \
        ICPU_MEMCTRL_TIMING0_RD_DATA_XFR_DLY(VC3_MPAR_CL - 3)

#define MSCC_MEMPARM_TIMING1 \
        ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_SAME_BANK_DLY(VC3_MPAR_tRC - 1) | \
        ICPU_MEMCTRL_TIMING1_BANK8_FAW_DLY(VC3_MPAR_tFAW - 1) | \
        ICPU_MEMCTRL_TIMING1_PRECH_TO_RAS_DLY(VC3_MPAR_tRP - 1) | \
        ICPU_MEMCTRL_TIMING1_RAS_TO_RAS_DLY(VC3_MPAR_tRRD - 1) | \
        ICPU_MEMCTRL_TIMING1_RAS_TO_CAS_DLY(VC3_MPAR_tRCD - 1) | \
        ICPU_MEMCTRL_TIMING1_WR_TO_RD_DLY(VC3_MPAR_CL + \
                                          (VC3_MPAR_BURST_LENGTH == 8 ? 2 : 0) + \
                                          VC3_MPAR_tWTR)
#define MSCC_MEMPARM_TIMING2 \
        ICPU_MEMCTRL_TIMING2_PRECH_ALL_DLY(VC3_MPAR_tRPA - 1) | \
        ICPU_MEMCTRL_TIMING2_MDSET_DLY(VC3_MPAR_tMRD - 1) | \
        ICPU_MEMCTRL_TIMING2_REF_DLY(VC3_MPAR_tRFC - 1) | \
        ICPU_MEMCTRL_TIMING2_FOUR_HUNDRED_NS_DLY(VC3_MPAR__400_ns_dly)

#define MSCC_MEMPARM_TIMING3 \
        ICPU_MEMCTRL_TIMING3_WR_TO_RD_CS_CHANGE_DLY(VC3_MPAR_CL - 1) | \
        ICPU_MEMCTRL_TIMING3_ODT_WR_DLY(VC3_MPAR_CL - 1) | \
        ICPU_MEMCTRL_TIMING3_LOCAL_ODT_RD_DLY(VC3_MPAR_CL - 1)

#endif

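/*
 * Result codes for the DQS training helpers below (note added for
 * clarity): DDR_TRAIN_OK - the search is done at the current delay;
 * DDR_TRAIN_CONTINUE - the delay was stepped, the caller should retry;
 * DDR_TRAIN_ERROR - the delay range was exhausted.
 */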
enum {
        DDR_TRAIN_OK,
        DDR_TRAIN_CONTINUE,
        DDR_TRAIN_ERROR,
};

/*
 * We actually have very few 'pause' possibilities apart from
 * these assembly nops (at this very early stage).
 */
#define PAUSE() asm volatile("nop; nop; nop; nop; nop; nop; nop; nop")

/* NB: Assumes inlining as no stack is available! */
static inline void set_dly(u32 bytelane, u32 dly)
{
        register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));

        r &= ~ICPU_MEMCTRL_DQS_DLY_DQS_DLY_M;
        r |= ICPU_MEMCTRL_DQS_DLY_DQS_DLY(dly);
        writel(r, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
}

static inline bool incr_dly(u32 bytelane)
{
        register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));

        if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
                writel(r + 1, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
                return true;
        }

        return false;
}

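/*
 * Note added for clarity: adjust byte lane 0's DQS delay by a signed
 * amount; used by the Luton training code further below.
 */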
static inline bool adjust_dly(int adjust)
{
        register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));

        if (ICPU_MEMCTRL_DQS_DLY_DQS_DLY(r) < 31) {
                writel(r + adjust, BASE_CFG + ICPU_MEMCTRL_DQS_DLY(0));
                return true;
        }

        return false;
}

/* NB: Assumes inlining as no stack is available! */
static inline void center_dly(u32 bytelane, u32 start)
{
        register u32 r = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane)) - start;

        writel(start + (r >> 1), BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
}

static inline void memphy_soft_reset(void)
{
        setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
        PAUSE();
        clrbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_FIFO_RST);
        PAUSE();
}

#ifdef CONFIG_SOC_OCELOT
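/*
 * Note added for clarity: expected per-bytelane byte values of the
 * training pattern that hal_vcoreiii_wait_memctl() writes to the start
 * of DDR (0xcacafefe, 0x22221111, ...); look_for()/look_past() read back
 * one byte per 32-bit word for the given bytelane and compare against
 * this table.
 */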
static u8 training_data[] = { 0xfe, 0x11, 0x33, 0x55, 0x77, 0x99, 0xbb, 0xdd };

static inline void sleep_100ns(u32 val)
{
        /* Set the timer tick generator to 100 ns */
        writel(VCOREIII_TIMER_DIVIDER - 1, BASE_CFG + ICPU_TIMER_TICK_DIV);

        /* Set the timer value */
        writel(val, BASE_CFG + ICPU_TIMER_VALUE(0));

        /* Enable timer 0 for one-shot */
        writel(ICPU_TIMER_CTRL_ONE_SHOT_ENA | ICPU_TIMER_CTRL_TIMER_ENA,
               BASE_CFG + ICPU_TIMER_CTRL(0));

        /* Wait for timer 0 to reach 0 */
        while (readl(BASE_CFG + ICPU_TIMER_VALUE(0)) != 0)
                ;
}

static inline void hal_vcoreiii_ddr_reset_assert(void)
{
        /* The DDR reset pin is on GPIO 19: drive it low to assert reset */
        setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
        writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_CLR);
        sleep_100ns(10000);
}

static inline void hal_vcoreiii_ddr_reset_release(void)
{
        /* Drive GPIO 19 high again to release the DDR from reset */
        setbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));
        writel(BIT(19), BASE_DEVCPU_GCB + PERF_GPIO_OUT_SET);
        sleep_100ns(10000);
}

/*
 * DDR memory sanity checking failed, tally and do hard reset
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_failed(void)
{
        register u32 reset;

        writel(readl(BASE_CFG + ICPU_GPR(6)) + 1, BASE_CFG + ICPU_GPR(6));

        clrbits_le32(BASE_DEVCPU_GCB + PERF_GPIO_OE, BIT(19));

        /* We have to execute the reset function from cache. Indeed,
         * the reboot workaround in _machine_restart() will change the
         * SPI NOR into SW bitbang.
         *
         * This will render the CPU unable to execute directly from
         * the NOR, which is why the reset instructions are prefetched
         * into the I-cache.
         *
         * When failing the DDR initialization we are executing from
         * NOR.
         *
         * The last instruction in _machine_restart() will reset the
         * MIPS CPU (and the cache), and the CPU will start executing
         * from the reset vector.
         */
        reset = KSEG0ADDR(_machine_restart);
        icache_lock((void *)reset, 128);
        asm volatile ("jr %0"::"r" (reset));

        panic("DDR init failed\n");
}

/*
 * DDR memory sanity checking done, possibly enable ECC.
 *
 * NB: Assumes inlining as no stack is available!
 */
static inline void hal_vcoreiii_ddr_verified(void)
{
#ifdef MIPS_VCOREIII_MEMORY_ECC
        /* Finally, enable ECC */
        register u32 val = readl(BASE_CFG + ICPU_MEMCTRL_CFG);

        val |= ICPU_MEMCTRL_CFG_DDR_ECC_ERR_ENA;
        val &= ~ICPU_MEMCTRL_CFG_BURST_SIZE;

        writel(val, BASE_CFG + ICPU_MEMCTRL_CFG);
#endif

        /* Reset Status register - sticky bits */
        writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT), BASE_CFG + ICPU_MEMCTRL_STAT);
}

/* NB: Assumes inlining as no stack is available! */
static inline int look_for(u32 bytelane)
{
        register u32 i;

        /* Reset FIFO in case any previous access failed */
        for (i = 0; i < sizeof(training_data); i++) {
                register u32 byte;

                memphy_soft_reset();
                /* Reset sticky bits */
                writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
                       BASE_CFG + ICPU_MEMCTRL_STAT);
                /* Read data */
                byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
                                   (i * 4));

                /*
                 * Prevent the compiler from reordering the read of RAM
                 * past the check of the error flags.
                 */
                rmb();
                if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
                    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
                     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
                        /* Noise on the line */
                        goto read_error;
                }
                /* If mismatch, increment DQS - if possible */
                if (byte != training_data[i]) {
read_error:
                        if (!incr_dly(bytelane))
                                return DDR_TRAIN_ERROR;
                        return DDR_TRAIN_CONTINUE;
                }
        }
        return DDR_TRAIN_OK;
}

/* NB: Assumes inlining as no stack is available! */
static inline int look_past(u32 bytelane)
{
        register u32 i;

        /* Reset FIFO in case any previous access failed */
        for (i = 0; i < sizeof(training_data); i++) {
                register u32 byte;

                memphy_soft_reset();
                /* Ack sticky bits */
                writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
                       BASE_CFG + ICPU_MEMCTRL_STAT);
                byte = __raw_readb((void __iomem *)MSCC_DDR_TO + bytelane +
                                   (i * 4));
                /*
                 * Prevent the compiler from reordering the read of RAM
                 * past the check of the error flags.
                 */
                rmb();
                if (readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
                    (ICPU_MEMCTRL_STAT_RDATA_MASKED |
                     ICPU_MEMCTRL_STAT_RDATA_DUMMY)) {
                        /* Noise on the line */
                        goto read_error;
                }
                /* Bail out when we see first mismatch */
                if (byte != training_data[i]) {
read_error:
                        return DDR_TRAIN_OK;
                }
        }
        /* All data compares OK, increase DQS and retry */
        if (!incr_dly(bytelane))
                return DDR_TRAIN_ERROR;

        return DDR_TRAIN_CONTINUE;
}

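/*
 * Note added for clarity - per-bytelane DQS training: starting from
 * delay 0, look_for() steps the delay until the whole training pattern
 * reads back correctly (start of the data eye), look_past() then keeps
 * stepping until reads fail again (end of the eye), and center_dly()
 * parks the delay in the middle of that window.
 */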
static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
{
        register int res;
        register u32 dqs_s;

        set_dly(bytelane, 0); /* Start training at DQS=0 */
        while ((res = look_for(bytelane)) == DDR_TRAIN_CONTINUE)
                ;
        if (res != DDR_TRAIN_OK)
                return res;

        dqs_s = readl(BASE_CFG + ICPU_MEMCTRL_DQS_DLY(bytelane));
        while ((res = look_past(bytelane)) == DDR_TRAIN_CONTINUE)
                ;
        if (res != DDR_TRAIN_OK)
                return res;
        /* Reset FIFO - for good measure */
        memphy_soft_reset();
        /* Adjust to center [dqs_s;cur] */
        center_dly(bytelane, dqs_s);
        return DDR_TRAIN_OK;
}

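/*
 * Note added for clarity: coarse scan - step both byte-lane DQS delays
 * until a read from DDR completes without the RDATA_MASKED/RDATA_DUMMY
 * error flags, i.e. until the read FIFO returns real data.
 */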
/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_init_dqs(void)
{
#define MAX_DQS 32
        register u32 i, j;

        for (i = 0; i < MAX_DQS; i++) {
                set_dly(0, i); /* Byte-lane 0 */
                for (j = 0; j < MAX_DQS; j++) {
                        __maybe_unused register u32 byte;

                        set_dly(1, j); /* Byte-lane 1 */
                        /* Reset FIFO in case any previous access failed */
                        memphy_soft_reset();
                        writel(readl(BASE_CFG + ICPU_MEMCTRL_STAT),
                               BASE_CFG + ICPU_MEMCTRL_STAT);
                        byte = __raw_readb((void __iomem *)MSCC_DDR_TO);
                        byte = __raw_readb((void __iomem *)(MSCC_DDR_TO + 1));
                        if (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT) &
                              (ICPU_MEMCTRL_STAT_RDATA_MASKED |
                               ICPU_MEMCTRL_STAT_RDATA_DUMMY)))
                                return 0;
                }
        }
        return -1;
}

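/*
 * Note added for clarity: basic data-integrity check - write eight
 * complementary word patterns to the start of DDR and verify that they
 * read back unchanged.
 */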
static inline int dram_check(void)
{
        register u32 i;

        for (i = 0; i < 8; i++) {
                __raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));
                if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
                        return 1;
        }
        return 0;
}
#else /* Luton */

static inline void sleep_100ns(u32 val)
{
}

static inline void hal_vcoreiii_ddr_reset_assert(void)
{
        setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_RST);
        setbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);
}

static inline void hal_vcoreiii_ddr_reset_release(void)
{
}

static inline void hal_vcoreiii_ddr_failed(void)
{
        register u32 memphy_cfg = readl(BASE_CFG + ICPU_MEMPHY_CFG);

        /* Do a fifo reset and start over */
        writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
               BASE_CFG + ICPU_MEMPHY_CFG);
        writel(memphy_cfg & ~ICPU_MEMPHY_CFG_PHY_FIFO_RST,
               BASE_CFG + ICPU_MEMPHY_CFG);
        writel(memphy_cfg | ICPU_MEMPHY_CFG_PHY_FIFO_RST,
               BASE_CFG + ICPU_MEMPHY_CFG);
}

static inline void hal_vcoreiii_ddr_verified(void)
{
}

static inline int look_for(u32 data)
{
        register u32 byte = __raw_readb((void __iomem *)MSCC_DDR_TO);

        if (data != byte) {
                if (!incr_dly(0))
                        return DDR_TRAIN_ERROR;
                return DDR_TRAIN_CONTINUE;
        }

        return DDR_TRAIN_OK;
}

/* This algorithm is converted from the TCL training algorithm used
 * during silicon simulation.
 * NB: Assumes inlining as no stack is available!
 */
static inline int hal_vcoreiii_train_bytelane(u32 bytelane)
{
        register int res;

        set_dly(bytelane, 0); /* Start training at DQS=0 */
        while ((res = look_for(0xff)) == DDR_TRAIN_CONTINUE)
                ;
        if (res != DDR_TRAIN_OK)
                return res;

        set_dly(bytelane, 0); /* Start training at DQS=0 */
        while ((res = look_for(0x00)) == DDR_TRAIN_CONTINUE)
                ;
        if (res != DDR_TRAIN_OK)
                return res;

        adjust_dly(-3);

        return DDR_TRAIN_OK;
}

static inline int hal_vcoreiii_init_dqs(void)
{
        return 0;
}

static inline int dram_check(void)
{
        register u32 i;

        for (i = 0; i < 8; i++) {
                __raw_writel(~i, (void __iomem *)(MSCC_DDR_TO + (i * 4)));

                if (__raw_readl((void __iomem *)(MSCC_DDR_TO + (i * 4))) != ~i)
                        return 1;
        }

        return 0;
}
#endif

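/*
 * Expected bring-up order (an assumption inferred from the helpers in
 * this file, not spelled out here): hal_vcoreiii_init_memctl(), then
 * hal_vcoreiii_wait_memctl(), then hal_vcoreiii_init_dqs() and
 * hal_vcoreiii_train_bytelane() for each byte lane, and finally
 * dram_check() followed by hal_vcoreiii_ddr_verified() on success or
 * hal_vcoreiii_ddr_failed() on error.
 */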
/*
 * NB: Called *early* to init memory controller - assumes inlining as
 * no stack is available!
 */
static inline void hal_vcoreiii_init_memctl(void)
{
        /* Ensure DDR is in reset */
        hal_vcoreiii_ddr_reset_assert();

        /* Wait maybe not needed, but ... */
        PAUSE();

        /* Drop sys ctl memory controller forced reset */
        clrbits_le32(BASE_CFG + ICPU_RESET, ICPU_RESET_MEM_RST_FORCE);

        PAUSE();

        /* Drop Reset, enable SSTL */
        writel(ICPU_MEMPHY_CFG_PHY_SSTL_ENA, BASE_CFG + ICPU_MEMPHY_CFG);
        PAUSE();

        /* Start the automatic SSTL output and ODT drive-strength calibration */
        writel(ICPU_MEMPHY_ZCAL_ZCAL_PROG_ODT(MIPS_VCOREIII_MEMORY_SSTL_ODT) |
               /* drive strength */
               ICPU_MEMPHY_ZCAL_ZCAL_PROG(MIPS_VCOREIII_MEMORY_SSTL_DRIVE) |
               /* Start calibration process */
               ICPU_MEMPHY_ZCAL_ZCAL_ENA, BASE_CFG + ICPU_MEMPHY_ZCAL);

        /* Wait for ZCAL to clear */
        while (readl(BASE_CFG + ICPU_MEMPHY_ZCAL) & ICPU_MEMPHY_ZCAL_ZCAL_ENA)
                ;
#ifdef CONFIG_SOC_OCELOT
        /* Check no ZCAL_ERR */
        if (readl(BASE_CFG + ICPU_MEMPHY_ZCAL_STAT)
            & ICPU_MEMPHY_ZCAL_STAT_ZCAL_ERR)
                hal_vcoreiii_ddr_failed();
#endif
        /* Drive CL, CK, ODT */
        setbits_le32(BASE_CFG + ICPU_MEMPHY_CFG, ICPU_MEMPHY_CFG_PHY_ODT_OE |
                     ICPU_MEMPHY_CFG_PHY_CK_OE | ICPU_MEMPHY_CFG_PHY_CL_OE);

        /* Initialize memory controller */
        writel(MSCC_MEMPARM_MEMCFG, BASE_CFG + ICPU_MEMCTRL_CFG);
        writel(MSCC_MEMPARM_PERIOD, BASE_CFG + ICPU_MEMCTRL_REF_PERIOD);

#ifdef CONFIG_SOC_OCELOT
        writel(MSCC_MEMPARM_TIMING0, BASE_CFG + ICPU_MEMCTRL_TIMING0);
#else /* Luton */
        clrbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, ((1 << 20) - 1));
        setbits_le32(BASE_CFG + ICPU_MEMCTRL_TIMING0, MSCC_MEMPARM_TIMING0);
#endif

        writel(MSCC_MEMPARM_TIMING1, BASE_CFG + ICPU_MEMCTRL_TIMING1);
        writel(MSCC_MEMPARM_TIMING2, BASE_CFG + ICPU_MEMCTRL_TIMING2);
        writel(MSCC_MEMPARM_TIMING3, BASE_CFG + ICPU_MEMCTRL_TIMING3);
        writel(MSCC_MEMPARM_MR0, BASE_CFG + ICPU_MEMCTRL_MR0_VAL);
        writel(MSCC_MEMPARM_MR1, BASE_CFG + ICPU_MEMCTRL_MR1_VAL);
        writel(MSCC_MEMPARM_MR2, BASE_CFG + ICPU_MEMCTRL_MR2_VAL);
        writel(MSCC_MEMPARM_MR3, BASE_CFG + ICPU_MEMCTRL_MR3_VAL);

#ifdef CONFIG_SOC_OCELOT
        /* Termination setup - enable ODT */
        writel(ICPU_MEMCTRL_TERMRES_CTRL_LOCAL_ODT_RD_ENA |
               /* Assert ODT0 for any write */
               ICPU_MEMCTRL_TERMRES_CTRL_ODT_WR_ENA(3),
               BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

        /* Release Reset from DDR */
        hal_vcoreiii_ddr_reset_release();

        writel(readl(BASE_CFG + ICPU_GPR(7)) + 1, BASE_CFG + ICPU_GPR(7));
#else /* Luton */
        /* Termination setup - disable ODT */
        writel(0, BASE_CFG + ICPU_MEMCTRL_TERMRES_CTRL);

#endif
}

static inline void hal_vcoreiii_wait_memctl(void)
{
        /* Now, rip it! */
        writel(ICPU_MEMCTRL_CTRL_INITIALIZE, BASE_CFG + ICPU_MEMCTRL_CTRL);

        while (!(readl(BASE_CFG + ICPU_MEMCTRL_STAT)
                 & ICPU_MEMCTRL_STAT_INIT_DONE))
                ;

        /* Settle...? */
        sleep_100ns(10000);
#ifdef CONFIG_SOC_OCELOT
        /* Establish data contents in DDR RAM for training */

        __raw_writel(0xcacafefe, ((void __iomem *)MSCC_DDR_TO));
        __raw_writel(0x22221111, ((void __iomem *)MSCC_DDR_TO + 0x4));
        __raw_writel(0x44443333, ((void __iomem *)MSCC_DDR_TO + 0x8));
        __raw_writel(0x66665555, ((void __iomem *)MSCC_DDR_TO + 0xC));
        __raw_writel(0x88887777, ((void __iomem *)MSCC_DDR_TO + 0x10));
        __raw_writel(0xaaaa9999, ((void __iomem *)MSCC_DDR_TO + 0x14));
        __raw_writel(0xccccbbbb, ((void __iomem *)MSCC_DDR_TO + 0x18));
        __raw_writel(0xeeeedddd, ((void __iomem *)MSCC_DDR_TO + 0x1C));
#else
        __raw_writel(0xff, ((void __iomem *)MSCC_DDR_TO));
#endif
}
#endif /* __ASM_MACH_DDR_H */