// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * (C) Copyright 2022 - Analog Devices, Inc.
 *
 * Written and/or maintained by Timesys Corporation
 *
 * Contact: Nathan Barrett-Morrison <nathan.morrison@timesys.com>
 * Contact: Greg Malysa <greg.malysa@timesys.com>
 */

#include <asm/io.h>
#include <asm/arch-adi/sc5xx/sc5xx.h>
#include <linux/types.h>
#include "clkinit.h"
#include "dmcinit.h"

#define REG_DMC0_BASE 0x31070000
#define REG_DMC1_BASE 0x31073000

#define REG_DMC_CTL 0x0004 // Control Register
#define REG_DMC_STAT 0x0008 // Status Register
#define REG_DMC_CFG 0x0040 // Configuration Register
#define REG_DMC_TR0 0x0044 // Timing 0 Register
#define REG_DMC_TR1 0x0048 // Timing 1 Register
#define REG_DMC_TR2 0x004C // Timing 2 Register
#define REG_DMC_MR 0x0060 // Shadow MR Register (DDR3)
#define REG_DMC_EMR1 0x0064 // Shadow EMR1 Register
#define REG_DMC_EMR2 0x0068 // Shadow EMR2 Register
#define REG_DMC_EMR3 0x006C
#define REG_DMC_DLLCTL 0x0080 // DLL Control Register
#define REG_DMC_DT_CALIB_ADDR 0x0090 // Data Calibration Address Register
#define REG_DMC_CPHY_CTL 0x01C0 // Controller to PHY Interface Register

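/*
 * Note: the PHY register layout differs between generations. The
 * SC57x/SC58x PHY_CTL/CAL_PADCTL block below and the SC59x lane/ZQ/root
 * control block that follows both start at offset 0x1000, so only the
 * set that matches the selected part is actually used.
 */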
/* SC57x && SC58x DMC REGs */
#define REG_DMC_PHY_CTL0 0x1000 // PHY Control 0 Register
#define REG_DMC_PHY_CTL1 0x1004 // PHY Control 1 Register
#define REG_DMC_PHY_CTL2 0x1008 // PHY Control 2 Register
#define REG_DMC_PHY_CTL3 0x100c // PHY Control 3 Register
#define REG_DMC_PHY_CTL4 0x1010 // PHY Control 4 Register
#define REG_DMC_CAL_PADCTL0 0x1034 // CALIBRATION PAD CTL 0 Register
#define REG_DMC_CAL_PADCTL2 0x103C // CALIBRATION PAD CTL 2 Register
/* END */

/* SC59x DMC REGs */
#define REG_DMC_DDR_LANE0_CTL0 0x1000 // Data Lane 0 Control Register 0
#define REG_DMC_DDR_LANE0_CTL1 0x1004 // Data Lane 0 Control Register 1
#define REG_DMC_DDR_LANE1_CTL0 0x100C // Data Lane 1 Control Register 0
#define REG_DMC_DDR_LANE1_CTL1 0x1010 // Data Lane 1 Control Register 1
#define REG_DMC_DDR_ROOT_CTL 0x1018 // DDR ROOT Module Control Register
#define REG_DMC_DDR_ZQ_CTL0 0x1034 // DDR Calibration Control Register 0
#define REG_DMC_DDR_ZQ_CTL1 0x1038 // DDR Calibration Control Register 1
#define REG_DMC_DDR_ZQ_CTL2 0x103C // DDR Calibration Control Register 2
#define REG_DMC_DDR_CA_CTL 0x1068 // DDR CA Lane Control Register
/* END */

#define REG_DMC_DDR_SCRATCH_2 0x1074
#define REG_DMC_DDR_SCRATCH_3 0x1078
#define REG_DMC_DDR_SCRATCH_6 0x1084
#define REG_DMC_DDR_SCRATCH_7 0x1088

#define REG_DMC_DDR_SCRATCH_STAT0 0x107C
#define REG_DMC_DDR_SCRATCH_STAT1 0x1080

#define DMC0_DATA_CALIB_ADD 0x80000000
#define DMC1_DATA_CALIB_ADD 0xC0000000

#define BITM_DMC_CFG_EXTBANK 0x0000F000 /* External Banks */
#define ENUM_DMC_CFG_EXTBANK1 0x00000000 /* EXTBANK: 1 External Bank */
#define BITM_DMC_CFG_SDRSIZE 0x00000F00 /* SDRAM Size */
#define ENUM_DMC_CFG_SDRSIZE64 0x00000000 /* SDRSIZE: 64M Bit SDRAM (LPDDR Only) */
#define ENUM_DMC_CFG_SDRSIZE128 0x00000100 /* SDRSIZE: 128M Bit SDRAM (LPDDR Only) */
#define ENUM_DMC_CFG_SDRSIZE256 0x00000200 /* SDRSIZE: 256M Bit SDRAM */
#define ENUM_DMC_CFG_SDRSIZE512 0x00000300 /* SDRSIZE: 512M Bit SDRAM */
#define ENUM_DMC_CFG_SDRSIZE1G 0x00000400 /* SDRSIZE: 1G Bit SDRAM */
#define ENUM_DMC_CFG_SDRSIZE2G 0x00000500 /* SDRSIZE: 2G Bit SDRAM */
#define ENUM_DMC_CFG_SDRSIZE4G 0x00000600 /* SDRSIZE: 4G Bit SDRAM */
#define ENUM_DMC_CFG_SDRSIZE8G 0x00000700 /* SDRSIZE: 8G Bit SDRAM */
#define BITM_DMC_CFG_SDRWID 0x000000F0 /* SDRAM Width */
#define ENUM_DMC_CFG_SDRWID16 0x00000020 /* SDRWID: 16-Bit Wide SDRAM */
#define BITM_DMC_CFG_IFWID 0x0000000F /* Interface Width */
#define ENUM_DMC_CFG_IFWID16 0x00000002 /* IFWID: 16-Bit Wide Interface */

#define BITM_DMC_CTL_DDR3EN 0x00000001
#define BITM_DMC_CTL_INIT 0x00000004
#define BITP_DMC_STAT_INITDONE 2 /* Initialization Done */
#define BITM_DMC_STAT_INITDONE 0x00000004

#define BITP_DMC_CTL_AL_EN 27
#define BITP_DMC_CTL_ZQCL 25 /* ZQ Calibration Long */
#define BITP_DMC_CTL_ZQCS 24 /* ZQ Calibration Short */
#define BITP_DMC_CTL_DLLCAL 13 /* DLL Calibration Start */
#define BITP_DMC_CTL_PPREF 12 /* Postpone Refresh */
#define BITP_DMC_CTL_RDTOWR 9 /* Read-to-Write Cycle */
#define BITP_DMC_CTL_ADDRMODE 8 /* Addressing (Page/Bank) Mode */
#define BITP_DMC_CTL_RESET 7 /* Reset SDRAM */
#define BITP_DMC_CTL_PREC 6 /* Precharge */
#define BITP_DMC_CTL_DPDREQ 5 /* Deep Power Down Request */
#define BITP_DMC_CTL_PDREQ 4 /* Power Down Request */
#define BITP_DMC_CTL_SRREQ 3 /* Self Refresh Request */
#define BITP_DMC_CTL_INIT 2 /* Initialize DRAM Start */
#define BITP_DMC_CTL_LPDDR 1 /* Low Power DDR Mode */
#define BITP_DMC_CTL_DDR3EN 0 /* DDR3 Mode */

#ifdef CONFIG_TARGET_SC584_EZKIT
	#define DMC_PADCTL2_VALUE 0x0078283C
#elif CONFIG_TARGET_SC573_EZKIT
	#define DMC_PADCTL2_VALUE 0x00782828
#elif CONFIG_TARGET_SC589_MINI || CONFIG_TARGET_SC589_EZKIT
	#define DMC_PADCTL2_VALUE 0x00783C3C
#elif defined(CONFIG_SC57X) || defined(CONFIG_SC58X)
	#error "PADCTL2 not specified for custom board!"
#else
	// Newer DMC. Legacy calibration obsolete
	#define DMC_PADCTL2_VALUE 0x0
#endif

#define DMC_CPHYCTL_VALUE 0x0000001A

#define BITP_DMC_MR1_QOFF 12 /* Output Buffer Enable */
#define BITP_DMC_MR1_TDQS 11 /* Termination Data Strobe */
#define BITP_DMC_MR1_RTT2 9 /* Rtt_nom */
#define BITP_DMC_MR1_WL 7 /* Write Leveling Enable */
#define BITP_DMC_MR1_RTT1 6 /* Rtt_nom */
#define BITP_DMC_MR1_DIC1 5 /* Output Driver Impedance Control */
#define BITP_DMC_MR1_AL 3 /* Additive Latency */
#define BITP_DMC_MR1_RTT0 2 /* Rtt_nom */
#define BITP_DMC_MR1_DIC0 1 /* Output Driver Impedance Control */
#define BITP_DMC_MR1_DLLEN 0 /* DLL Enable */

#define BITP_DMC_MR2_CWL 3 /* CAS Write Latency */

#define BITP_DMC_TR0_TMRD 28 /* Timing Mode Register Delay */
#define BITP_DMC_TR0_TRC 20 /* Timing Row Cycle */
#define BITP_DMC_TR0_TRAS 12 /* Timing Row Active Time */
#define BITP_DMC_TR0_TRP 8 /* Timing RAS Precharge */
#define BITP_DMC_TR0_TWTR 4 /* Timing Write to Read */
#define BITP_DMC_TR0_TRCD 0 /* Timing RAS to CAS Delay */

#define BITP_DMC_TR1_TRRD 28 /* Timing Read-Read Delay */
#define BITP_DMC_TR1_TRFC 16 /* Timing Refresh-to-Command */
#define BITP_DMC_TR1_TREF 0 /* Timing Refresh Interval */

#define BITP_DMC_TR2_TCKE 20 /* Timing Clock Enable */
#define BITP_DMC_TR2_TXP 16 /* Timing Exit Powerdown */
#define BITP_DMC_TR2_TWR 12 /* Timing Write Recovery */
#define BITP_DMC_TR2_TRTP 8 /* Timing Read-to-Precharge */
#define BITP_DMC_TR2_TFAW 0 /* Timing Four-Activated-Window */

#define BITP_DMC_MR_PD 12 /* Active Powerdown Mode */
#define BITP_DMC_MR_WRRECOV 9 /* Write Recovery */
#define BITP_DMC_MR_DLLRST 8 /* DLL Reset */
#define BITP_DMC_MR_CL 4 /* CAS Latency */
#define BITP_DMC_MR_CL0 2 /* CAS Latency */
#define BITP_DMC_MR_BLEN 0 /* Burst Length */

#define BITP_DMC_DLLCTL_DATACYC 8 /* Data Cycles */
#define BITP_DMC_DLLCTL_DLLCALRDCNT 0 /* DLL Calibration RD Count */

#define BITM_DMC_DLLCTL_DATACYC 0x00000F00 /* Data Cycles */
#define BITM_DMC_DLLCTL_DLLCALRDCNT 0x000000FF /* DLL Calib RD Count */

#define BITP_DMC_STAT_PHYRDPHASE 20 /* PHY Read Phase */

#define BITM_DMC_DDR_LANE0_CTL0_CB_RSTDAT 0x08000000 /* Rst Data Pads */
#define BITM_DMC_DDR_LANE1_CTL0_CB_RSTDAT 0x08000000 /* Rst Data Pads */
#define BITM_DMC_DDR_LANE0_CTL1_COMP_DCYCLE 0x00000002 /* Compute Dcycle */
#define BITM_DMC_DDR_LANE1_CTL1_COMP_DCYCLE 0x00000002 /* Compute Dcycle */
#define BITM_DMC_DDR_LANE1_CTL0_CB_RSTDLL 0x00000100 /* Rst Lane DLL */
#define BITM_DMC_DDR_LANE0_CTL0_CB_RSTDLL 0x00000100 /* Rst Lane DLL */
#define BITP_DMC_DDR_ROOT_CTL_PIPE_OFSTDCYCLE 10 /* Pipeline offset for PHYC_DATACYCLE */
#define BITM_DMC_DDR_ROOT_CTL_SW_REFRESH 0x00002000 /* Refresh Lane DLL Code */
#define BITM_DMC_DDR_CA_CTL_SW_REFRESH 0x00004000 /* Refresh Lane DLL Code */

#define BITP_DMC_CTL_RL_DQS 26 /* RL_DQS */
#define BITM_DMC_CTL_RL_DQS 0x04000000 /* RL_DQS */
#define BITP_DMC_EMR3_MPR 2 /* Multi Purpose Read Enable (Read Leveling) */
#define BITM_DMC_EMR3_MPR 0x00000004 /* Multi Purpose Read Enable (Read Leveling) */
#define BITM_DMC_MR1_WL 0x00000080 /* Write Leveling Enable */
#define BITM_DMC_STAT_PHYRDPHASE 0x00F00000 /* PHY Read Phase */

#define BITP_DMC_DDR_LANE0_CTL1_BYPCODE 10
#define BITM_DMC_DDR_LANE0_CTL1_BYPCODE 0x00007C00
#define BITP_DMC_DDR_LANE0_CTL1_BYPDELCHAINEN 15
#define BITM_DMC_DDR_LANE0_CTL1_BYPDELCHAINEN 0x00008000

#define DMC_ZQCTL0_VALUE 0x00785A64
#define DMC_ZQCTL1_VALUE 0
#define DMC_ZQCTL2_VALUE 0x70000000

#define DMC_TRIG_CALIB 0
#define DMC_OFSTDCYCLE 2

#define BITP_DMC_CAL_PADCTL0_RTTCALEN 31 /* RTT Calibration Enable */
#define BITP_DMC_CAL_PADCTL0_PDCALEN 30 /* PULLDOWN Calib Enable */
#define BITP_DMC_CAL_PADCTL0_PUCALEN 29 /* PULLUP Calib Enable */
#define BITP_DMC_CAL_PADCTL0_CALSTRT 28 /* Start New Calib (Hardware Cleared) */
#define BITM_DMC_CAL_PADCTL0_RTTCALEN 0x80000000 /* RTT Calibration Enable */
#define BITM_DMC_CAL_PADCTL0_PDCALEN 0x40000000 /* PULLDOWN Calib Enable */
#define BITM_DMC_CAL_PADCTL0_PUCALEN 0x20000000 /* PULLUP Calib Enable */
#define BITM_DMC_CAL_PADCTL0_CALSTRT 0x10000000 /* Start New Calib (Hardware Cleared) */
#define ENUM_DMC_PHY_CTL4_DDR3 0x00000000 /* DDRMODE: DDR3 Mode */
#define ENUM_DMC_PHY_CTL4_DDR2 0x00000001 /* DDRMODE: DDR2 Mode */
#define ENUM_DMC_PHY_CTL4_LPDDR 0x00000003 /* DDRMODE: LPDDR Mode */

#define BITP_DMC_DDR_ZQ_CTL0_IMPRTT 16 /* Data/DQS ODT */
#define BITP_DMC_DDR_ZQ_CTL0_IMPWRDQ 8 /* Data/DQS/DM/CLK Drive Strength */
#define BITP_DMC_DDR_ZQ_CTL0_IMPWRADD 0 /* Address/Command Drive Strength */
#define BITM_DMC_DDR_ZQ_CTL0_IMPRTT 0x00FF0000 /* Data/DQS ODT */
#define BITM_DMC_DDR_ZQ_CTL0_IMPWRDQ 0x0000FF00 /* Data/DQS/DM/CLK Drive Strength */
#define BITM_DMC_DDR_ZQ_CTL0_IMPWRADD 0x000000FF /* Address/Command Drive Strength */

#define BITM_DMC_DDR_ROOT_CTL_TRIG_RD_XFER_ALL 0x00200000 /* All Lane Read Status */

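/*
 * Mode-register and control-register images for DDR2 and DDR3. The
 * DMC_CL/DMC_WL/DMC_MR1_* and timing inputs used below are not defined in
 * this file; they are expected to come from the included headers or the
 * board configuration.
 */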
#if defined(CONFIG_ADI_USE_DDR2)
	#define DMC_MR0_VALUE \
		((DMC_BL / 4 + 1) << BITP_DMC_MR_BLEN) | \
		(DMC_CL << BITP_DMC_MR_CL) | \
		(DMC_WRRECOV << BITP_DMC_MR_WRRECOV)

	#define DMC_MR1_VALUE \
		(DMC_MR1_AL << BITP_DMC_MR1_AL | 0x04) \

	#define DMC_MR2_VALUE 0
	#define DMC_MR3_VALUE 0

	#define DMC_CTL_VALUE \
		(DMC_RDTOWR << BITP_DMC_CTL_RDTOWR) | \
		(1 << BITP_DMC_CTL_DLLCAL) | \
		(BITM_DMC_CTL_INIT)
#else
	#define DMC_MR0_VALUE \
		(0 << BITP_DMC_MR_BLEN) | \
		(DMC_CL0 << BITP_DMC_MR_CL0) | \
		(DMC_CL123 << BITP_DMC_MR_CL) | \
		(DMC_WRRECOV << BITP_DMC_MR_WRRECOV) | \
		(1 << BITP_DMC_MR_DLLRST)

	#define DMC_MR1_VALUE \
		(DMC_MR1_DLLEN << BITP_DMC_MR1_DLLEN) | \
		(DMC_MR1_DIC0 << BITP_DMC_MR1_DIC0) | \
		(DMC_MR1_RTT0 << BITP_DMC_MR1_RTT0) | \
		(DMC_MR1_AL << BITP_DMC_MR1_AL) | \
		(DMC_MR1_DIC1 << BITP_DMC_MR1_DIC1) | \
		(DMC_MR1_RTT1 << BITP_DMC_MR1_RTT1) | \
		(DMC_MR1_RTT2 << BITP_DMC_MR1_RTT2) | \
		(DMC_MR1_WL << BITP_DMC_MR1_WL) | \
		(DMC_MR1_TDQS << BITP_DMC_MR1_TDQS) | \
		(DMC_MR1_QOFF << BITP_DMC_MR1_QOFF)

	#define DMC_MR2_VALUE \
		((DMC_WL) << BITP_DMC_MR2_CWL)

	#define DMC_MR3_VALUE \
		((DMC_WL) << BITP_DMC_MR2_CWL)

	#define DMC_CTL_VALUE \
		(DMC_RDTOWR << BITP_DMC_CTL_RDTOWR) | \
		(BITM_DMC_CTL_INIT) | \
		(BITM_DMC_CTL_DDR3EN) | \
		(DMC_CTL_AL_EN << BITP_DMC_CTL_AL_EN)
#endif

#define DMC_DLLCTL_VALUE \
	(DMC_DATACYC << BITP_DMC_DLLCTL_DATACYC) | \
	(DMC_DLLCALRDCNT << BITP_DMC_DLLCTL_DLLCALRDCNT)

#define DMC_CFG_VALUE \
	ENUM_DMC_CFG_IFWID16 | \
	ENUM_DMC_CFG_SDRWID16 | \
	SDR_CHIP_SIZE | \
	ENUM_DMC_CFG_EXTBANK1

#define DMC_TR0_VALUE \
	(DMC_TRCD << BITP_DMC_TR0_TRCD) | \
	(DMC_TWTR << BITP_DMC_TR0_TWTR) | \
	(DMC_TRP << BITP_DMC_TR0_TRP) | \
	(DMC_TRAS << BITP_DMC_TR0_TRAS) | \
	(DMC_TRC << BITP_DMC_TR0_TRC) | \
	(DMC_TMRD << BITP_DMC_TR0_TMRD)

#define DMC_TR1_VALUE \
	(DMC_TREF << BITP_DMC_TR1_TREF) | \
	(DMC_TRFC << BITP_DMC_TR1_TRFC) | \
	(DMC_TRRD << BITP_DMC_TR1_TRRD)

#define DMC_TR2_VALUE \
	(DMC_TFAW << BITP_DMC_TR2_TFAW) | \
	(DMC_TRTP << BITP_DMC_TR2_TRTP) | \
	(DMC_TWR << BITP_DMC_TR2_TWR) | \
	(DMC_TXP << BITP_DMC_TR2_TXP) | \
	(DMC_TCKE << BITP_DMC_TR2_TCKE)

enum DDR_MODE {
	DDR3_MODE,
	DDR2_MODE,
	LPDDR_MODE,
};

enum CALIBRATION_MODE {
	CALIBRATION_LEGACY,
	CALIBRATION_METHOD1,
	CALIBRATION_METHOD2,
};

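/*
 * Per-controller configuration snapshot. __dmc_config() fills this in from
 * the DMC_*_VALUE macros above before dmc_init() programs the hardware;
 * controllers are configured one at a time, so a single static instance is
 * enough.
 */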
static struct dmc_param {
	phys_addr_t reg;
	u32 ddr_mode;
	u32 padctl2_value;
	u32 dmc_cphyctl_value;
	u32 dmc_cfg_value;
	u32 dmc_dllctl_value;
	u32 dmc_ctl_value;
	u32 dmc_tr0_value;
	u32 dmc_tr1_value;
	u32 dmc_tr2_value;
	u32 dmc_mr0_value;
	u32 dmc_mr1_value;
	u32 dmc_mr2_value;
	u32 dmc_mr3_value;
	u32 dmc_zqctl0_value;
	u32 dmc_zqctl1_value;
	u32 dmc_zqctl2_value;
	u32 dmc_data_calib_add_value;
	bool phy_init_required;
	bool anomaly_20000037_applicable;
	enum CALIBRATION_MODE calib_mode;
} dmc;

#ifdef CONFIG_SC59X_64
#define DQS_DEFAULT_DELAY 3ul

#define DELAYTRIM 1
#define LANE0_DQS_DELAY 1
#define LANE1_DQS_DELAY 1

#define CLKDIR 0ul

#define DQSTRIM 0
#define DQSCODE 0ul

#define CLKTRIM 0
#define CLKCODE 0ul
#endif

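/*
 * Legacy pad calibration, used when dmc.calib_mode == CALIBRATION_LEGACY:
 * select the DDR mode in PHY_CTL4, enable the RTT and pull-up/pull-down
 * calibration engines, program the board ODT/drive values into
 * CAL_PADCTL2, then wait for the pad calibration to finish.
 */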
static inline void calibration_legacy(void)
{
	u32 temp;

	/* 1. Set DDR mode to DDR3/DDR2/LPDDR in DMCx_PHY_CTL4 register */
	if (dmc.ddr_mode == DDR3_MODE)
		writel(ENUM_DMC_PHY_CTL4_DDR3, dmc.reg + REG_DMC_PHY_CTL4);
	else if (dmc.ddr_mode == DDR2_MODE)
		writel(ENUM_DMC_PHY_CTL4_DDR2, dmc.reg + REG_DMC_PHY_CTL4);
	else if (dmc.ddr_mode == LPDDR_MODE)
		writel(ENUM_DMC_PHY_CTL4_LPDDR, dmc.reg + REG_DMC_PHY_CTL4);

	/*
	 * 2. Make sure that the bits 6, 7, 25, and 27 of the DMC_PHY_
	 * CTL3 register are set
	 */
	writel(0x0A0000C0, dmc.reg + REG_DMC_PHY_CTL3);

	/*
	 * 3. For DDR2/DDR3 mode, make sure that the bits 0, 1, 2, 3 of
	 * the DMC_PHY_CTL0 register and the bits 26, 27, 28, 29, 30, 31
	 * of the DMC_PHY_CTL2 are set.
	 */
	if (dmc.ddr_mode == DDR3_MODE ||
	    dmc.ddr_mode == DDR2_MODE) {
		writel(0xFC000000, dmc.reg + REG_DMC_PHY_CTL2);
		writel(0x0000000f, dmc.reg + REG_DMC_PHY_CTL0);
	}

	writel(0x00000000, dmc.reg + REG_DMC_PHY_CTL1);

	/* 4. For DDR3 mode, set bit 1 and configure bits [5:2] of the
	 * DMC_CPHY_CTL register with WL=CWL+AL in DCLK cycles.
	 */
	if (dmc.ddr_mode == DDR3_MODE)
		writel(dmc.dmc_cphyctl_value, dmc.reg + REG_DMC_CPHY_CTL);
	/* 5. Perform On Die Termination(ODT) & Driver Impedance Calibration */
	if (dmc.ddr_mode == LPDDR_MODE) {
		/* Bypass processor ODT */
		writel(0x80000, dmc.reg + REG_DMC_PHY_CTL1);
	} else {
		/* Set bits RTTCALEN, PDCALEN, PUCALEN of register */
		temp = BITM_DMC_CAL_PADCTL0_RTTCALEN |
		       BITM_DMC_CAL_PADCTL0_PDCALEN |
		       BITM_DMC_CAL_PADCTL0_PUCALEN;
		writel(temp, dmc.reg + REG_DMC_CAL_PADCTL0);
		/* Configure ODT and drive impedance values in the
		 * DMCx_CAL_PADCTL2 register
		 */
		writel(dmc.padctl2_value, dmc.reg + REG_DMC_CAL_PADCTL2);
		/* start calibration */
		temp |= BITM_DMC_CAL_PADCTL0_CALSTRT;
		writel(temp, dmc.reg + REG_DMC_CAL_PADCTL0);
		/* Wait for PAD calibration to complete - 300 DCLK cycle.
		 * Worst case: CCLK=450 MHz, DCLK=125 MHz
		 */
		dmcdelay(300);
	}
}

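/*
 * ZQ calibration through the DMCx_DDR_ZQ_CTL0..2 registers (SC59x register
 * set only): program the drive/ODT targets directly and pulse
 * DDR_ROOT_CTL/DDR_CA_CTL to broadcast them to the pads. Not currently
 * selected by __dmc_config(), which uses method 2 on SC59x.
 */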
static inline void calibration_method1(void)
{
#if defined(CONFIG_SC59X) || defined(CONFIG_SC59X_64)
	writel(dmc.dmc_zqctl0_value, dmc.reg + REG_DMC_DDR_ZQ_CTL0);
	writel(dmc.dmc_zqctl1_value, dmc.reg + REG_DMC_DDR_ZQ_CTL1);
	writel(dmc.dmc_zqctl2_value, dmc.reg + REG_DMC_DDR_ZQ_CTL2);

	/* Generate the trigger */
	writel(0x0ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	writel(0x00010000ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(8000u);

	/* The [31:26] bits may change if pad ring changes */
	writel(0x0C000001ul | DMC_TRIG_CALIB, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(8000u);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
#endif
}

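/*
 * Scratch-register based ZQ calibration, the mode selected for SC59x in
 * __dmc_config(): the ZQCTL target values are pushed to the pad/lane
 * controller slaves through DDR_SCRATCH_2/3, the resulting pull-up and
 * pull-down drive codes are read back from DDR_SCRATCH_6/7, and matching
 * ODT codes are computed and written back to the pads.
 */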
static inline void calibration_method2(void)
{
#if defined(CONFIG_SC59X) || defined(CONFIG_SC59X_64)
	u32 stat_value = 0x0u;
	u32 drv_pu, drv_pd, odt_pu, odt_pd;
	u32 ro_dt, clk_dqs_drv_impedance;
	u32 temp;

	/* Reset trigger */
	writel(0x0ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_3);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_2);

	/* Writing internal registers in calib pad to zero. Calib mode set
	 * to 1 [26], trig M1 S1 write [16], this enables usage of scratch
	 * registers instead of ZQCTL registers
	 */
	writel(0x04010000ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);

	/* TRIGGER FOR M2-S2 WRITE -> slave id 31:26 trig m2,s2 write
	 * bit 1->1 slave1 address is 4
	 */
	writel(0x10000002ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(2500u);

	/* reset Trigger */
	writel(0x0u, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0x0u, dmc.reg + REG_DMC_DDR_ROOT_CTL);

	/* write to slave 1, make the power down bit high */
	writel(0x1ul << 12, dmc.reg + REG_DMC_DDR_SCRATCH_3);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_2);
	dmcdelay(2500u);

	/* Calib mode set to 1 [26], trig M1 S1 write [16] */
	writel(0x04010000ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);

	writel(0x10000002ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(2500u);

	writel(0x0ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	writel(0x0, dmc.reg + REG_DMC_DDR_SCRATCH_3);

	/* for slave 0 */
	writel(dmc.dmc_zqctl0_value, dmc.reg + REG_DMC_DDR_SCRATCH_2);

	/* Calib mode set to 1 [26], trig M1 S1 write [16] */
	writel(0x04010000ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);

	writel(0x0C000002ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(2500u);

	writel(0x0ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);

	/* writing to slave 1
	 * calstrt is 0, but other programming is done
	 *
	 * make power down LOW again, to kickstart BIAS circuit
	 */
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_3);
	writel(0x30000000ul, dmc.reg + REG_DMC_DDR_SCRATCH_2);

	/* write to ca_ctl lane, calib mode set to 1 [26],
	 * trig M1 S1 write [16]
	 */
	writel(0x04010000ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);

	/* copies data to lane controller slave
	 * TRIGGER FOR M2-S2 WRITE -> slave id 31:26
	 * trig m2,s2 write bit 1->1
	 * slave1 address is 4
	 */
	writel(0x10000002ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(2500u);

	/* reset Trigger */
	writel(0x0ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_3);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_2);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_3);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_2);
	writel(0x04010000ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);
	writel(0x10000002ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(2500u);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_3);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_2);
	writel(0x0ul, dmc.reg + REG_DMC_DDR_SCRATCH_3);
	writel(0x50000000ul, dmc.reg + REG_DMC_DDR_SCRATCH_2);
	writel(0x04010000ul, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);
	writel(0x10000002ul, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(2500u);
	writel(0u, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0u, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	writel(0x0C000004u, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(2500u);
	writel(BITM_DMC_DDR_ROOT_CTL_TRIG_RD_XFER_ALL,
	       dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);
	writel(0u, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0u, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	// calculate ODT PU and PD values
	stat_value = ((readl(dmc.reg + REG_DMC_DDR_SCRATCH_7) & 0x0000FFFFu) <<
		      16);
	stat_value |= ((readl(dmc.reg + REG_DMC_DDR_SCRATCH_6) & 0xFFFF0000u) >>
		       16);
	clk_dqs_drv_impedance = ((dmc.dmc_zqctl0_value) &
		BITM_DMC_DDR_ZQ_CTL0_IMPWRDQ) >> BITP_DMC_DDR_ZQ_CTL0_IMPWRDQ;
	ro_dt = ((dmc.dmc_zqctl0_value) & BITM_DMC_DDR_ZQ_CTL0_IMPRTT) >>
		BITP_DMC_DDR_ZQ_CTL0_IMPRTT;
	drv_pu = stat_value & 0x0000003Fu;
	drv_pd = (stat_value >> 12) & 0x0000003Fu;
	odt_pu = (drv_pu * clk_dqs_drv_impedance) / ro_dt;
	odt_pd = (drv_pd * clk_dqs_drv_impedance) / ro_dt;
	temp = ((1uL << 24) |
		((drv_pd & 0x0000003Fu)) |
		((odt_pd & 0x0000003Fu) << 6) |
		((drv_pu & 0x0000003Fu) << 12) |
		((odt_pu & 0x0000003Fu) << 18));
	temp |= readl(dmc.reg + REG_DMC_DDR_SCRATCH_2);
	writel(temp, dmc.reg + REG_DMC_DDR_SCRATCH_2);
	writel(0x0C010000u, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);
	writel(0x08000002u, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(2500u);
	writel(0u, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0u, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	writel(0x04010000u, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);
	writel(0x80000002u, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(2500u);
	writel(0u, dmc.reg + REG_DMC_DDR_CA_CTL);
	writel(0u, dmc.reg + REG_DMC_DDR_ROOT_CTL);
#endif
}

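/*
 * Assert or release the lane DLL reset bits for one controller (SC59x
 * register set); adi_dmc_reset_lanes() below applies this to every
 * enabled controller. The delay gives the lane DLLs time to settle after
 * the reset bit is toggled.
 */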
static inline void adi_dmc_lane_reset(bool reset, uint32_t dmc_no)
{
#if defined(CONFIG_SC59X) || defined(CONFIG_SC59X_64)
	u32 temp;
	phys_addr_t base = (dmc_no == 0) ? REG_DMC0_BASE : REG_DMC1_BASE;
	phys_addr_t ln0 = base + REG_DMC_DDR_LANE0_CTL0;
	phys_addr_t ln1 = base + REG_DMC_DDR_LANE1_CTL0;

	if (reset) {
		temp = readl(ln0);
		temp |= BITM_DMC_DDR_LANE0_CTL0_CB_RSTDLL;
		writel(temp, ln0);

		temp = readl(ln1);
		temp |= BITM_DMC_DDR_LANE1_CTL0_CB_RSTDLL;
		writel(temp, ln1);
	} else {
		temp = readl(ln0);
		temp &= ~BITM_DMC_DDR_LANE0_CTL0_CB_RSTDLL;
		writel(temp, ln0);

		temp = readl(ln1);
		temp &= ~BITM_DMC_DDR_LANE1_CTL0_CB_RSTDLL;
		writel(temp, ln1);
	}
	dmcdelay(9000u);
#endif
}

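/*
 * Exported helper: put the data lanes of every enabled controller into or
 * out of reset. On SC59x this toggles the lane DLL reset bits; on older
 * parts it writes the data-pad reset bit in PHY_CTL0 instead. Skipped
 * entirely for DDR2 configurations.
 */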
void adi_dmc_reset_lanes(bool reset)
{
	if (!IS_ENABLED(CONFIG_ADI_USE_DDR2)) {
		if (IS_ENABLED(CONFIG_SC59X) || IS_ENABLED(CONFIG_SC59X_64)) {
			if (IS_ENABLED(CONFIG_ADI_USE_DMC0))
				adi_dmc_lane_reset(reset, 0);
			if (IS_ENABLED(CONFIG_ADI_USE_DMC1))
				adi_dmc_lane_reset(reset, 1);
		} else {
			u32 temp = reset ? 0x800 : 0x0;

			if (IS_ENABLED(CONFIG_ADI_USE_DMC0))
				writel(temp, REG_DMC0_BASE + REG_DMC_PHY_CTL0);
			if (IS_ENABLED(CONFIG_ADI_USE_DMC1))
				writel(temp, REG_DMC1_BASE + REG_DMC_PHY_CTL0);
		}
	}
}

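/*
 * Core controller bring-up, common to all parts: program CFG/TR0-TR2 and
 * the shadow mode registers, start the INIT sequence, poll
 * DMCx_STAT.INITDONE, then finish the data-path setup (DATACYC/read-count
 * programming and DQS delay trim on SC59x, fixed DLLCTL value and the
 * anomaly 20000037 workaround on older parts).
 */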
static inline void dmc_controller_init(void)
{
#if defined(CONFIG_SC59X) || defined(CONFIG_SC59X_64)
	u32 phyphase, rd_cnt, t_EMR1, t_EMR3, t_CTL, data_cyc, temp;
#endif

	/* 1. Program the DMC controller registers: DMCx_CFG, DMCx_TR0,
	 * DMCx_TR1, DMCx_TR2, DMCx_MR(DDR2/LPDDR)/DMCx_MR0(DDR3),
	 * DMCx_EMR1(DDR2)/DMCx_MR1(DDR3),
	 * DMCx_EMR2(DDR2)/DMCx_EMR(LPDDR)/DMCx_MR2(DDR3)
	 */
	writel(dmc.dmc_cfg_value, dmc.reg + REG_DMC_CFG);
	writel(dmc.dmc_tr0_value, dmc.reg + REG_DMC_TR0);
	writel(dmc.dmc_tr1_value, dmc.reg + REG_DMC_TR1);
	writel(dmc.dmc_tr2_value, dmc.reg + REG_DMC_TR2);
	writel(dmc.dmc_mr0_value, dmc.reg + REG_DMC_MR);
	writel(dmc.dmc_mr1_value, dmc.reg + REG_DMC_EMR1);
	writel(dmc.dmc_mr2_value, dmc.reg + REG_DMC_EMR2);

#if defined(CONFIG_SC59X) || defined(CONFIG_SC59X_64)
	writel(dmc.dmc_mr3_value, dmc.reg + REG_DMC_EMR3);
	writel(dmc.dmc_dllctl_value, dmc.reg + REG_DMC_DLLCTL);
	dmcdelay(2000u);

	temp = readl(dmc.reg + REG_DMC_DDR_CA_CTL);
	temp |= BITM_DMC_DDR_CA_CTL_SW_REFRESH;
	writel(temp, dmc.reg + REG_DMC_DDR_CA_CTL);
	dmcdelay(5u);

	temp = readl(dmc.reg + REG_DMC_DDR_ROOT_CTL);
	temp |= BITM_DMC_DDR_ROOT_CTL_SW_REFRESH |
		(DMC_OFSTDCYCLE << BITP_DMC_DDR_ROOT_CTL_PIPE_OFSTDCYCLE);
	writel(temp, dmc.reg + REG_DMC_DDR_ROOT_CTL);
#endif

	/* 2. Make sure that the REG_DMC_DT_CALIB_ADDR register is programmed
	 * to an unused DMC location corresponding to a burst of 16 bytes
	 * (by default it is the starting address of the DMC address range).
	 */
#ifndef CONFIG_SC59X
	writel(dmc.dmc_data_calib_add_value, dmc.reg + REG_DMC_DT_CALIB_ADDR);
#endif
	/* 3. Program the DMCx_CTL register with INIT bit set to start
	 * the DMC initialization sequence
	 */
	writel(dmc.dmc_ctl_value, dmc.reg + REG_DMC_CTL);
	/* 4. Wait for the DMC initialization to complete by polling
	 * DMCx_STAT.INITDONE bit.
	 */

#if defined(CONFIG_SC59X) || defined(CONFIG_SC59X_64)
	dmcdelay(722000u);

	/* Add necessary delay depending on the configuration */
	t_EMR1 = (dmc.dmc_mr1_value & BITM_DMC_MR1_WL) >> BITP_DMC_MR1_WL;

	dmcdelay(600u);
	if (t_EMR1 != 0u)
		while ((readl(dmc.reg + REG_DMC_EMR1) & BITM_DMC_MR1_WL) != 0)
			;

	t_EMR3 = (dmc.dmc_mr3_value & BITM_DMC_EMR3_MPR) >>
		 BITP_DMC_EMR3_MPR;
	dmcdelay(2000u);
	if (t_EMR3 != 0u)
		while ((readl(dmc.reg + REG_DMC_EMR3) & BITM_DMC_EMR3_MPR) != 0)
			;

	t_CTL = (dmc.dmc_ctl_value & BITM_DMC_CTL_RL_DQS) >> BITP_DMC_CTL_RL_DQS;
	dmcdelay(600u);
	if (t_CTL != 0u)
		while ((readl(dmc.reg + REG_DMC_CTL) & BITM_DMC_CTL_RL_DQS) != 0)
			;
#endif

	/* check if DMC initialization finished */
	while ((readl(dmc.reg + REG_DMC_STAT) & BITM_DMC_STAT_INITDONE) == 0)
		;

#if defined(CONFIG_SC59X) || defined(CONFIG_SC59X_64)
	/* toggle DCYCLE */
	temp = readl(dmc.reg + REG_DMC_DDR_LANE0_CTL1);
	temp |= BITM_DMC_DDR_LANE0_CTL1_COMP_DCYCLE;
	writel(temp, dmc.reg + REG_DMC_DDR_LANE0_CTL1);

	temp = readl(dmc.reg + REG_DMC_DDR_LANE1_CTL1);
	temp |= BITM_DMC_DDR_LANE1_CTL1_COMP_DCYCLE;
	writel(temp, dmc.reg + REG_DMC_DDR_LANE1_CTL1);

	dmcdelay(10u);

	temp = readl(dmc.reg + REG_DMC_DDR_LANE0_CTL1);
	temp &= (~BITM_DMC_DDR_LANE0_CTL1_COMP_DCYCLE);
	writel(temp, dmc.reg + REG_DMC_DDR_LANE0_CTL1);

	temp = readl(dmc.reg + REG_DMC_DDR_LANE1_CTL1);
	temp &= (~BITM_DMC_DDR_LANE1_CTL1_COMP_DCYCLE);
	writel(temp, dmc.reg + REG_DMC_DDR_LANE1_CTL1);

	/* toggle RSTDAT */
	temp = readl(dmc.reg + REG_DMC_DDR_LANE0_CTL0);
	temp |= BITM_DMC_DDR_LANE0_CTL0_CB_RSTDAT;
	writel(temp, dmc.reg + REG_DMC_DDR_LANE0_CTL0);

	temp = readl(dmc.reg + REG_DMC_DDR_LANE0_CTL0);
	temp &= (~BITM_DMC_DDR_LANE0_CTL0_CB_RSTDAT);
	writel(temp, dmc.reg + REG_DMC_DDR_LANE0_CTL0);

	temp = readl(dmc.reg + REG_DMC_DDR_LANE1_CTL0);
	temp |= BITM_DMC_DDR_LANE1_CTL0_CB_RSTDAT;
	writel(temp, dmc.reg + REG_DMC_DDR_LANE1_CTL0);

	temp = readl(dmc.reg + REG_DMC_DDR_LANE1_CTL0);
	temp &= (~BITM_DMC_DDR_LANE1_CTL0_CB_RSTDAT);
	writel(temp, dmc.reg + REG_DMC_DDR_LANE1_CTL0);

	dmcdelay(2500u);

	/* Program phyphase */
	phyphase = (readl(dmc.reg + REG_DMC_STAT) &
		    BITM_DMC_STAT_PHYRDPHASE) >> BITP_DMC_STAT_PHYRDPHASE;
	data_cyc = (phyphase << BITP_DMC_DLLCTL_DATACYC) &
		   BITM_DMC_DLLCTL_DATACYC;
	rd_cnt = dmc.dmc_dllctl_value;
	rd_cnt <<= BITP_DMC_DLLCTL_DLLCALRDCNT;
	rd_cnt &= BITM_DMC_DLLCTL_DLLCALRDCNT;
	writel(rd_cnt | data_cyc, dmc.reg + REG_DMC_DLLCTL);
	writel((dmc.dmc_ctl_value & (~BITM_DMC_CTL_INIT) &
		(~BITM_DMC_CTL_RL_DQS)), dmc.reg + REG_DMC_CTL);

#if DELAYTRIM
	/* DQS delay trim */
	u32 stat_value, WL_code_LDQS, WL_code_UDQS;

	/* For LDQS */
	temp = readl(dmc.reg + REG_DMC_DDR_LANE0_CTL1) | (0x000000D0);
	writel(temp, dmc.reg + REG_DMC_DDR_LANE0_CTL1);
	dmcdelay(2500u);
	writel(0x00400000, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);
	writel(0x0, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	stat_value = (readl(dmc.reg + REG_DMC_DDR_SCRATCH_STAT0) &
		      (0xFFFF0000)) >> 16;
	WL_code_LDQS = (stat_value) & (0x0000001F);

	temp = readl(dmc.reg + REG_DMC_DDR_LANE0_CTL1);
	temp &= ~(BITM_DMC_DDR_LANE0_CTL1_BYPCODE |
		  BITM_DMC_DDR_LANE0_CTL1_BYPDELCHAINEN);
	writel(temp, dmc.reg + REG_DMC_DDR_LANE0_CTL1);

	/* If write leveling is enabled */
	if ((dmc.dmc_mr1_value & BITM_DMC_MR1_WL) >> BITP_DMC_MR1_WL) {
		temp = readl(dmc.reg + REG_DMC_DDR_LANE0_CTL1);
		temp |= (((WL_code_LDQS + LANE0_DQS_DELAY) <<
			BITP_DMC_DDR_LANE0_CTL1_BYPCODE) &
			BITM_DMC_DDR_LANE0_CTL1_BYPCODE) |
			BITM_DMC_DDR_LANE0_CTL1_BYPDELCHAINEN;
		writel(temp, dmc.reg + REG_DMC_DDR_LANE0_CTL1);
	} else {
		temp = readl(dmc.reg + REG_DMC_DDR_LANE0_CTL1);
		temp |= (((DQS_DEFAULT_DELAY + LANE0_DQS_DELAY) <<
			BITP_DMC_DDR_LANE0_CTL1_BYPCODE) &
			BITM_DMC_DDR_LANE0_CTL1_BYPCODE) |
			BITM_DMC_DDR_LANE0_CTL1_BYPDELCHAINEN;
		writel(temp, dmc.reg + REG_DMC_DDR_LANE0_CTL1);
	}
	dmcdelay(2500u);

	/* For UDQS */
	temp = readl(dmc.reg + REG_DMC_DDR_LANE1_CTL1) | (0x000000D0);
	writel(temp, dmc.reg + REG_DMC_DDR_LANE1_CTL1);
	dmcdelay(2500u);
	writel(0x00800000, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	dmcdelay(2500u);
	writel(0x0, dmc.reg + REG_DMC_DDR_ROOT_CTL);
	stat_value = (readl(dmc.reg + REG_DMC_DDR_SCRATCH_STAT1) &
		      (0xFFFF0000)) >> 16;
	WL_code_UDQS = (stat_value) & (0x0000001F);

	temp = readl(dmc.reg + REG_DMC_DDR_LANE1_CTL1);
	temp &= ~(BITM_DMC_DDR_LANE0_CTL1_BYPCODE |
		  BITM_DMC_DDR_LANE0_CTL1_BYPDELCHAINEN);
	writel(temp, dmc.reg + REG_DMC_DDR_LANE1_CTL1);

	/* If write leveling is enabled */
	if ((dmc.dmc_mr1_value & BITM_DMC_MR1_WL) >> BITP_DMC_MR1_WL) {
		temp = readl(dmc.reg + REG_DMC_DDR_LANE1_CTL1);
		temp |= (((WL_code_UDQS + LANE1_DQS_DELAY) <<
			BITP_DMC_DDR_LANE0_CTL1_BYPCODE) &
			BITM_DMC_DDR_LANE0_CTL1_BYPCODE) |
			BITM_DMC_DDR_LANE0_CTL1_BYPDELCHAINEN;
		writel(temp, dmc.reg + REG_DMC_DDR_LANE1_CTL1);
	} else {
		temp = readl(dmc.reg + REG_DMC_DDR_LANE1_CTL1);
		temp |= (((DQS_DEFAULT_DELAY + LANE1_DQS_DELAY) <<
			BITP_DMC_DDR_LANE0_CTL1_BYPCODE) &
			BITM_DMC_DDR_LANE0_CTL1_BYPCODE) |
			BITM_DMC_DDR_LANE0_CTL1_BYPDELCHAINEN;
		writel(temp, dmc.reg + REG_DMC_DDR_LANE1_CTL1);
	}
	dmcdelay(2500u);
#endif

#else
	/* 5. Program the DMCx_DLLCTL register with 0x948 value
	 * (DATACYC=9, DLLCALRDCNT=72).
	 */
	writel(0x00000948, dmc.reg + REG_DMC_DLLCTL);
#endif

	/* 6. Workaround for anomaly#20000037 */
	if (dmc.anomaly_20000037_applicable) {
		/* Perform dummy read to any DMC location */
		readl(0x80000000);

		writel(readl(dmc.reg + REG_DMC_PHY_CTL0) | 0x1000,
		       dmc.reg + REG_DMC_PHY_CTL0);
		/* Clear DMCx_PHY_CTL0.RESETDAT bit */
		writel(readl(dmc.reg + REG_DMC_PHY_CTL0) & (~0x1000),
		       dmc.reg + REG_DMC_PHY_CTL0);
	}
}

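/*
 * Top-level init for the controller currently described by 'dmc': run the
 * selected PHY calibration first (unless phy_init_required is false), then
 * hand over to dmc_controller_init(). The DQSTRIM/CLKTRIM duty-trim blocks
 * below are compiled out unless the corresponding macros are set to 1.
 */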
static inline void dmc_init(void)
{
	/* PHY Calibration+Initialization */
	if (!dmc.phy_init_required)
		goto out;

	switch (dmc.calib_mode) {
	case CALIBRATION_LEGACY:
		calibration_legacy();
		break;
	case CALIBRATION_METHOD1:
		calibration_method1();
		break;
	case CALIBRATION_METHOD2:
		calibration_method2();
		break;
	}

#if DQSTRIM
	/* DQS duty trim */
	temp = readl(dmc.reg + REG_DMC_DDR_LANE0_CTL0);
	temp |= ((DQSCODE) << BITP_DMC_DDR_LANE0_CTL0_BYPENB) &
		(BITM_DMC_DDR_LANE1_CTL0_BYPENB |
		 BITM_DMC_DDR_LANE0_CTL0_BYPSELP |
		 BITM_DMC_DDR_LANE0_CTL0_BYPCODE);
	writel(temp, dmc.reg + REG_DMC_DDR_LANE0_CTL0);

	temp = readl(dmc.reg + REG_DMC_DDR_LANE1_CTL0);
	temp |= ((DQSCODE) << BITP_DMC_DDR_LANE1_CTL0_BYPENB) &
		(BITM_DMC_DDR_LANE1_CTL1_BYPCODE |
		 BITM_DMC_DDR_LANE1_CTL0_BYPSELP |
		 BITM_DMC_DDR_LANE1_CTL0_BYPCODE);
	writel(temp, dmc.reg + REG_DMC_DDR_LANE1_CTL0);
#endif

#if CLKTRIM
	/* Clock duty trim */
	temp = readl(dmc.reg + REG_DMC_DDR_CA_CTL);
	temp |= (((CLKCODE << BITP_DMC_DDR_CA_CTL_BYPCODE1) &
		  BITM_DMC_DDR_CA_CTL_BYPCODE1) |
		 BITM_DMC_DDR_CA_CTL_BYPENB |
		 ((CLKDIR << BITP_DMC_DDR_CA_CTL_BYPSELP) &
		  BITM_DMC_DDR_CA_CTL_BYPSELP));
	writel(temp, dmc.reg + REG_DMC_DDR_CA_CTL);
#endif

out:
	/* Controller Initialization */
	dmc_controller_init();
}

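/*
 * Fill in the 'dmc' parameter block for the requested controller (base
 * address, calibration address, DDR2/DDR3 mode, calibration method and the
 * precomputed register values), then run dmc_init().
 */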
static inline void __dmc_config(uint32_t dmc_no)
{
	if (dmc_no == 0) {
		dmc.reg = REG_DMC0_BASE;
		dmc.dmc_data_calib_add_value = DMC0_DATA_CALIB_ADD;
	} else if (dmc_no == 1) {
		dmc.reg = REG_DMC1_BASE;
		dmc.dmc_data_calib_add_value = DMC1_DATA_CALIB_ADD;
	} else {
		return;
	}

	if (IS_ENABLED(CONFIG_ADI_USE_DDR2))
		dmc.ddr_mode = DDR2_MODE;
	else
		dmc.ddr_mode = DDR3_MODE;

	dmc.phy_init_required = true;

#if defined(CONFIG_SC59X) || defined(CONFIG_SC59X_64)
	dmc.anomaly_20000037_applicable = false;
	dmc.dmc_dllctl_value = DMC_DLLCTL_VALUE;
	dmc.calib_mode = CALIBRATION_METHOD2;
#else
	dmc.anomaly_20000037_applicable = true;
	dmc.calib_mode = CALIBRATION_LEGACY;
#endif

	dmc.dmc_ctl_value = DMC_CTL_VALUE;
	dmc.dmc_cfg_value = DMC_CFG_VALUE;
	dmc.dmc_tr0_value = DMC_TR0_VALUE;
	dmc.dmc_tr1_value = DMC_TR1_VALUE;
	dmc.dmc_tr2_value = DMC_TR2_VALUE;
	dmc.dmc_mr0_value = DMC_MR0_VALUE;
	dmc.dmc_mr1_value = DMC_MR1_VALUE;
	dmc.dmc_mr2_value = DMC_MR2_VALUE;

#if defined(CONFIG_SC59X) || defined(CONFIG_SC59X_64)
	dmc.dmc_mr3_value = DMC_MR3_VALUE;
	dmc.dmc_zqctl0_value = DMC_ZQCTL0_VALUE;
	dmc.dmc_zqctl1_value = DMC_ZQCTL1_VALUE;
	dmc.dmc_zqctl2_value = DMC_ZQCTL2_VALUE;
#endif

	dmc.padctl2_value = DMC_PADCTL2_VALUE;
	dmc.dmc_cphyctl_value = DMC_CPHYCTL_VALUE;

	/* Initialize DMC now */
	dmc_init();
}

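/*
 * Entry point: configure whichever DMC instances are enabled in Kconfig,
 * one after the other.
 */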
void DMC_Config(void)
{
	if (IS_ENABLED(CONFIG_ADI_USE_DMC0))
		__dmc_config(0);

	if (IS_ENABLED(CONFIG_ADI_USE_DMC1))
		__dmc_config(1);
}