// SPDX-License-Identifier: GPL-2.0+
/*
 * (c) 2015 Paul Thacker <paul.thacker@microchip.com>
 *
 */
#include <wait_bit.h>
#include <linux/kernel.h>
#include <linux/bitops.h>
#include <mach/pic32.h>
#include <mach/ddr.h>

#include "ddr2_regs.h"
#include "ddr2_timing.h"

/* init DDR2 Phy */
void ddr2_phy_init(void)
{
        struct ddr2_phy_regs *ddr2_phy;
        u32 pad_ctl;

        ddr2_phy = ioremap(PIC32_DDR2P_BASE, sizeof(*ddr2_phy));

        /* PHY_DLL_RECALIB */
        writel(DELAY_START_VAL(3) | DISABLE_RECALIB(0) |
               RECALIB_CNT(0x10), &ddr2_phy->dll_recalib);

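        /*
         * PHY pad control: the write below programs on-die termination
         * (ODT) selection and pull strengths, output driver strength and
         * the receiver enable.  These particular values are the reference
         * settings used for this SoC; they are not universal constants.
         */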
        /* PHY_PAD_CTRL */
        pad_ctl = ODT_SEL | ODT_EN | DRIVE_SEL(0) |
                  ODT_PULLDOWN(2) | ODT_PULLUP(3) |
                  EXTRA_OEN_CLK(0) | NOEXT_DLL |
                  DLR_DFT_WRCMD | HALF_RATE |
                  DRVSTR_PFET(0xe) | DRVSTR_NFET(0xe) |
                  RCVR_EN | PREAMBLE_DLY(2);
        writel(pad_ctl, &ddr2_phy->pad_ctrl);

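        /*
         * The SCL (self-calibration logic) registers tell the PHY the burst
         * length and the read/write CAS latencies it should assume when the
         * calibration run is started later by ddr2_phy_calib_start().
         */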
        /* SCL_CONFIG_0 */
        writel(SCL_BURST8 | SCL_DDR_CONNECTED | SCL_RCAS_LAT(RL) |
               SCL_ODTCSWW, &ddr2_phy->scl_config_1);

        /* SCL_CONFIG_1 */
        writel(SCL_CSEN | SCL_WCAS_LAT(WL), &ddr2_phy->scl_config_2);

        /* SCL_LAT */
        writel(SCL_CAPCLKDLY(3) | SCL_DDRCLKDLY(4), &ddr2_phy->scl_latency);
}

/* start PHY self-calibration logic */
static int ddr2_phy_calib_start(void)
{
        struct ddr2_phy_regs *ddr2_phy;

        ddr2_phy = ioremap(PIC32_DDR2P_BASE, sizeof(*ddr2_phy));

        /* DDR PHY SCL start */
        writel(SCL_START | SCL_EN, &ddr2_phy->scl_start);

        /* wait for the SCL run on the data byte to pass */
        return wait_for_bit_le32(&ddr2_phy->scl_start, SCL_LUBPASS,
                                 true, CONFIG_SYS_HZ, false);
}

/* DDR2 Controller initialization */

/* Target Agent Arbiter */
static void ddr_set_arbiter(struct ddr2_ctrl_regs *ctrl,
                            const struct ddr2_arbiter_params *const param)
{
        int i;

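        /*
         * The arbiter is programmed indirectly: writing an index to the
         * target-select register (tsel) chooses which agent's minimum burst
         * size, request period or command-accept field the following write
         * updates.  Note that, as written, every agent receives the same
         * parameter set; per-agent values would require indexing param[i].
         */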
        for (i = 0; i < NUM_AGENTS; i++) {
                /* set min burst size */
                writel(i * MIN_LIM_WIDTH, &ctrl->tsel);
                writel(param->min_limit, &ctrl->minlim);

                /* set request period (4 * req_period clocks) */
                writel(i * RQST_PERIOD_WIDTH, &ctrl->tsel);
                writel(param->req_period, &ctrl->reqprd);

                /* set number of bursts accepted */
                writel(i * MIN_CMDACPT_WIDTH, &ctrl->tsel);
                writel(param->min_cmd_acpt, &ctrl->mincmd);
        }
}

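/*
 * Boards may override this __weak hook to supply their own arbiter
 * parameters; the table below is the default used when they do not.
 */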
const struct ddr2_arbiter_params *__weak board_get_ddr_arbiter_params(void)
{
        /* default arbiter parameters */
        static const struct ddr2_arbiter_params arb_params[] = {
                { .min_limit = 0x1f, .req_period = 0xff, .min_cmd_acpt = 0x04, },
                { .min_limit = 0x1f, .req_period = 0xff, .min_cmd_acpt = 0x10, },
                { .min_limit = 0x1f, .req_period = 0xff, .min_cmd_acpt = 0x10, },
                { .min_limit = 0x04, .req_period = 0xff, .min_cmd_acpt = 0x04, },
                { .min_limit = 0x04, .req_period = 0xff, .min_cmd_acpt = 0x04, },
        };

        return &arb_params[0];
}

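/*
 * Queue one DRAM initialization command in the controller's host command
 * registers.  The delay argument is the wait time after the command,
 * expressed in picoseconds (T_CK is the DDR clock period in ps); it is
 * rounded up to whole clocks, reduced by two (floored at zero, presumably
 * because the controller inserts two clocks of its own) and packed above
 * bit 11 of the cmd20 word.
 */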
static void host_load_cmd(struct ddr2_ctrl_regs *ctrl, u32 cmd_idx,
                          u32 hostcmd2, u32 hostcmd1, u32 delay)
{
        u32 hc_delay;

        hc_delay = max_t(u32, DIV_ROUND_UP(delay, T_CK), 2) - 2;
        writel(hostcmd1, &ctrl->cmd10[cmd_idx]);
        writel((hostcmd2 & 0x7ff) | (hc_delay << 11), &ctrl->cmd20[cmd_idx]);
}

/* init DDR2 Controller */
void ddr2_ctrl_init(void)
{
        u32 wr2prech, rd2prech, wr2rd, wr2rd_cs;
        u32 ras2ras, ras2cas, prech2ras, temp;
        const struct ddr2_arbiter_params *arb_params;
        struct ddr2_ctrl_regs *ctrl;

        ctrl = ioremap(PIC32_DDR2C_BASE, sizeof(*ctrl));

        /* PIC32 DDR2 controller always works in HALF_RATE mode */
        writel(HALF_RATE_MODE, &ctrl->memwidth);

        /* Set arbiter configuration per target */
        arb_params = board_get_ddr_arbiter_params();
        ddr_set_arbiter(ctrl, arb_params);

        /* Address Configuration, model {CS, ROW, BA, COL} */
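        /*
         * The *_RSHIFT values give the bit position of each address field
         * within the incoming system address and the *_MASK registers
         * select its bits, so memcfg0..memcfg4 together describe how an
         * address decomposes into chip select, row, bank and column.
         */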
        writel((ROW_ADDR_RSHIFT | (BA_RSHFT << 8) | (CS_ADDR_RSHIFT << 16) |
                (COL_HI_RSHFT << 24) | (SB_PRI << 29) |
                (EN_AUTO_PRECH << 30)), &ctrl->memcfg0);

        writel(ROW_ADDR_MASK, &ctrl->memcfg1);
        writel(COL_HI_MASK, &ctrl->memcfg2);
        writel(COL_LO_MASK, &ctrl->memcfg3);
        writel(BA_MASK | (CS_ADDR_MASK << 8), &ctrl->memcfg4);

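        /*
         * Refresh configuration: REFCNT_CLK takes the average refresh
         * interval (T_RFI) and REFDLY_CLK the refresh cycle time
         * (T_RFC_MIN), both converted to controller clocks (T_CK_CTRL);
         * MAX_PEND_REF(7) allows up to seven refresh requests to be
         * pending.
         */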
        /* Refresh Config */
        writel(REFCNT_CLK(DIV_ROUND_UP(T_RFI, T_CK_CTRL) - 2) |
               REFDLY_CLK(DIV_ROUND_UP(T_RFC_MIN, T_CK_CTRL) - 2) |
               MAX_PEND_REF(7),
               &ctrl->refcfg);

        /* Power Config */
        writel(ECC_EN(0) | ERR_CORR_EN(0) | EN_AUTO_PWR_DN(0) |
               EN_AUTO_SELF_REF(3) | PWR_DN_DLY(8) |
               SELF_REF_DLY(17) | PRECH_PWR_DN_ONLY(0),
               &ctrl->pwrcfg);

        /* Delay Config */
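        /*
         * The command-to-command delays below are derived from the DDR2
         * timing parameters in ddr2_timing.h: parameters given as times
         * (T_WTR, T_WR, T_RTP, T_RRD, T_RCD, T_RP) are rounded up to
         * controller clocks (T_CK_CTRL), parameters given in DDR clocks
         * (the *_TCK macros) are halved because the controller runs at
         * half the DDR clock rate, and write/read latency (WL/RL) and
         * burst length (BL) are added where a data burst is involved.
         */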
        wr2rd = max_t(u32, DIV_ROUND_UP(T_WTR, T_CK_CTRL),
                      DIV_ROUND_UP(T_WTR_TCK, 2)) + WL + BL;
        wr2rd_cs = max_t(u32, wr2rd - 1, 3);
        wr2prech = DIV_ROUND_UP(T_WR, T_CK_CTRL) + WL + BL;
        rd2prech = max_t(u32, DIV_ROUND_UP(T_RTP, T_CK_CTRL),
                         DIV_ROUND_UP(T_RTP_TCK, 2)) + BL - 2;
        ras2ras = max_t(u32, DIV_ROUND_UP(T_RRD, T_CK_CTRL),
                        DIV_ROUND_UP(T_RRD_TCK, 2)) - 1;
        ras2cas = DIV_ROUND_UP(T_RCD, T_CK_CTRL) - 1;
        prech2ras = DIV_ROUND_UP(T_RP, T_CK_CTRL) - 1;

        writel(((wr2rd & 0x0f) |
                ((wr2rd_cs & 0x0f) << 4) |
                ((BL - 1) << 8) |
                (BL << 12) |
                ((BL - 1) << 16) |
                ((BL - 1) << 20) |
                ((BL + 2) << 24) |
                ((RL - WL + 3) << 28)), &ctrl->dlycfg0);

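        /*
         * dlycfg1 carries, besides the CKE, DLL-lock and power-down exit
         * counts, the overflow (upper) bits of the delays whose low nibbles
         * are packed into dlycfg0 and dlycfg2, hence the ">> 4" terms.
         */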
        writel(((T_CKE_TCK - 1) |
                (((DIV_ROUND_UP(T_DLLK, 2) - 2) & 0xff) << 8) |
                ((T_CKE_TCK - 1) << 16) |
                ((max_t(u32, T_XP_TCK, T_CKE_TCK) - 1) << 20) |
                ((wr2prech >> 4) << 26) |
                ((wr2rd >> 4) << 27) |
                ((wr2rd_cs >> 4) << 28) |
                (((RL + 5) >> 4) << 29) |
                ((DIV_ROUND_UP(T_DLLK, 2) >> 8) << 30)), &ctrl->dlycfg1);

        writel((DIV_ROUND_UP(T_RP, T_CK_CTRL) |
                (rd2prech << 8) |
                ((wr2prech & 0x0f) << 12) |
                (ras2ras << 16) |
                (ras2cas << 20) |
                (prech2ras << 24) |
                ((RL + 3) << 28)), &ctrl->dlycfg2);

        writel(((DIV_ROUND_UP(T_RAS_MIN, T_CK_CTRL) - 1) |
                ((DIV_ROUND_UP(T_RC, T_CK_CTRL) - 1) << 8) |
                ((DIV_ROUND_UP(T_FAW, T_CK_CTRL) - 1) << 16)),
               &ctrl->dlycfg3);

        /* ODT Config */
        writel(0x0, &ctrl->odtcfg);
        writel(BIT(16), &ctrl->odtencfg);
        writel(ODTRDLY(RL - 3) | ODTWDLY(WL - 3) | ODTRLEN(2) | ODTWLEN(3),
               &ctrl->odtcfg);

        /* Transfer Configuration */
        writel(NXTDATRQDLY(2) | NXDATAVDLY(4) | RDATENDLY(2) |
               MAX_BURST(3) | (7 << 28) | BIG_ENDIAN(0),
               &ctrl->xfercfg);

        /* DRAM Initialization */
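        /*
         * The host commands queued below follow the standard JEDEC DDR2
         * power-up sequence: CKE high and a 400 ns wait, precharge all,
         * load EMR2 and EMR3, enable the DLL via EMR, load MR with DLL
         * reset, precharge all, two auto-refreshes, reload MR without DLL
         * reset, then OCD default followed by OCD exit through EMR.
         */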
        /* CKE high after reset and wait 400 nsec */
        host_load_cmd(ctrl, 0, 0, IDLE_NOP, 400000);

        /* issue precharge all command */
        host_load_cmd(ctrl, 1, 0x04, PRECH_ALL_CMD, T_RP + T_CK);

        /* initialize EMR2 */
        host_load_cmd(ctrl, 2, 0x200, LOAD_MODE_CMD, T_MRD_TCK * T_CK);

        /* initialize EMR3 */
        host_load_cmd(ctrl, 3, 0x300, LOAD_MODE_CMD, T_MRD_TCK * T_CK);

        /*
         * RDQS disable, DQSB enable, OCD exit, 150 ohm termination,
         * AL=0, DLL enable
         */
        host_load_cmd(ctrl, 4, 0x100,
                      LOAD_MODE_CMD | (0x40 << 24), T_MRD_TCK * T_CK);
        /*
         * PD fast exit, WR REC = T_WR in clocks - 1,
         * DLL reset, CAS = RL, burst = 4
         */
        temp = ((DIV_ROUND_UP(T_WR, T_CK) - 1) << 1) | 1;
        host_load_cmd(ctrl, 5, temp, LOAD_MODE_CMD | (RL << 28) | (2 << 24),
                      T_MRD_TCK * T_CK);

        /* issue precharge all command */
        host_load_cmd(ctrl, 6, 4, PRECH_ALL_CMD, T_RP + T_CK);

        /* issue refresh command */
        host_load_cmd(ctrl, 7, 0, REF_CMD, T_RFC_MIN);

        /* issue refresh command */
        host_load_cmd(ctrl, 8, 0, REF_CMD, T_RFC_MIN);

        /* Mode register programming as before without DLL reset */
        host_load_cmd(ctrl, 9, temp, LOAD_MODE_CMD | (RL << 28) | (3 << 24),
                      T_MRD_TCK * T_CK);

        /* extended mode register same as before with OCD default */
        host_load_cmd(ctrl, 10, 0x103, LOAD_MODE_CMD | (0xc << 24),
                      T_MRD_TCK * T_CK);

        /* extended mode register same as before with OCD exit */
        host_load_cmd(ctrl, 11, 0x100, LOAD_MODE_CMD | (0x4 << 28),
                      140 * T_CK);

        writel(CMD_VALID | NUMHOSTCMD(11), &ctrl->cmdissue);

        /* start memory initialization */
        writel(INIT_START, &ctrl->memcon);

        /* wait for all host cmds to be transmitted */
        wait_for_bit_le32(&ctrl->cmdissue, CMD_VALID, false,
                          CONFIG_SYS_HZ, false);

        /* inform all cmds issued, ready for normal operation */
        writel(INIT_START | INIT_DONE, &ctrl->memcon);

        /* perform PHY calibration */
        if (ddr2_phy_calib_start())
                printf("ddr2: phy calib failed\n");
}

phys_size_t ddr2_calculate_size(void)
{
        u32 temp;

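        /*
         * Total size = 2^(columns + banks + rows) addressable locations,
         * each 2 bytes wide (16-bit data bus), multiplied by CS_BITS for
         * the chip selects.
         */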
        temp = 1 << (COL_BITS + BA_BITS + ROW_BITS);
        /* 16-bit data width between controller and DIMM */
        temp = temp * CS_BITS * (16 / 8);
        return (phys_size_t)temp;
}