/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"
#include "ddr3_hw_training.h"
#include "xor.h"

#ifdef MV88F78X60
#include "ddr3_patterns_64bit.h"
#else
#include "ddr3_patterns_16bit.h"
#if defined(MV88F672X)
#include "ddr3_patterns_16bit.h"
#endif
#endif

/*
 * Debug
 */

#define DEBUG_MAIN_C(s, d, l) \
	DEBUG_MAIN_S(s); DEBUG_MAIN_D(d, l); DEBUG_MAIN_S("\n")
#define DEBUG_MAIN_FULL_C(s, d, l) \
	DEBUG_MAIN_FULL_S(s); DEBUG_MAIN_FULL_D(d, l); DEBUG_MAIN_FULL_S("\n")

#ifdef MV_DEBUG_MAIN
#define DEBUG_MAIN_S(s) puts(s)
#define DEBUG_MAIN_D(d, l) printf("%x", d)
#else
#define DEBUG_MAIN_S(s)
#define DEBUG_MAIN_D(d, l)
#endif

#ifdef MV_DEBUG_MAIN_FULL
#define DEBUG_MAIN_FULL_S(s) puts(s)
#define DEBUG_MAIN_FULL_D(d, l) printf("%x", d)
#else
#define DEBUG_MAIN_FULL_S(s)
#define DEBUG_MAIN_FULL_D(d, l)
#endif

#ifdef MV_DEBUG_SUSPEND_RESUME
#define DEBUG_SUSPEND_RESUME_S(s) puts(s)
#define DEBUG_SUSPEND_RESUME_D(d, l) printf("%x", d)
#else
#define DEBUG_SUSPEND_RESUME_S(s)
#define DEBUG_SUSPEND_RESUME_D(d, l)
#endif

static u32 ddr3_sw_wl_rl_debug;
static u32 ddr3_run_pbs = 1;

void ddr3_print_version(void)
{
	puts("DDR3 Training Sequence - Ver 5.7.");
}

void ddr3_set_sw_wl_rl_debug(u32 val)
{
	ddr3_sw_wl_rl_debug = val;
}

void ddr3_set_pbs(u32 val)
{
	ddr3_run_pbs = val;
}

int ddr3_hw_training(u32 target_freq, u32 ddr_width, int xor_bypass,
		     u32 scrub_offs, u32 scrub_size, int dqs_clk_aligned,
		     int debug_mode, int reg_dimm_skip_wl)
{
	/* A370 has no PBS mechanism */
	__maybe_unused u32 first_loop_flag = 0;
	u32 freq, reg;
	MV_DRAM_INFO dram_info;
	int ratio_2to1 = 0;
	int tmp_ratio = 1;
	int status;

	if (debug_mode)
		DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 1\n");

	memset(&dram_info, 0, sizeof(dram_info));
	dram_info.num_cs = ddr3_get_cs_num_from_reg();
	dram_info.cs_ena = ddr3_get_cs_ena_from_reg();
	dram_info.target_frequency = target_freq;
	dram_info.ddr_width = ddr_width;
	dram_info.num_of_std_pups = ddr_width / PUP_SIZE;
	dram_info.rl400_bug = 0;
	dram_info.multi_cs_mr_support = 0;
#ifdef MV88F67XX
	dram_info.rl400_bug = 1;
#endif

	/* Ignore ECC errors - if ECC is enabled */
	reg = reg_read(REG_SDRAM_CONFIG_ADDR);
	if (reg & (1 << REG_SDRAM_CONFIG_ECC_OFFS)) {
		dram_info.ecc_ena = 1;
		reg |= (1 << REG_SDRAM_CONFIG_IERR_OFFS);
		reg_write(REG_SDRAM_CONFIG_ADDR, reg);
	} else {
		dram_info.ecc_ena = 0;
	}

	reg = reg_read(REG_SDRAM_CONFIG_ADDR);
	if (reg & (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS))
		dram_info.reg_dimm = 1;
	else
		dram_info.reg_dimm = 0;

	dram_info.num_of_total_pups = ddr_width / PUP_SIZE + dram_info.ecc_ena;

	/* Get target 2T value */
	reg = reg_read(REG_DUNIT_CTRL_LOW_ADDR);
	dram_info.mode_2t = (reg >> REG_DUNIT_CTRL_LOW_2T_OFFS) &
		REG_DUNIT_CTRL_LOW_2T_MASK;

	/* Get target CL value */
#ifdef MV88F67XX
	reg = reg_read(REG_DDR3_MR0_ADDR) >> 2;
#else
	reg = reg_read(REG_DDR3_MR0_CS_ADDR) >> 2;
#endif

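	/*
	 * Reassemble the 4-bit CAS latency code from MR0: after the >>2
	 * above, bit 0 holds MR0 bit A2 and bits [4:2] hold A[6:4], so
	 * merge them into one contiguous field before translating it with
	 * ddr3_valid_cl_to_cl().
	 */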
	reg = (((reg >> 1) & 0xE) | (reg & 0x1)) & 0xF;
	dram_info.cl = ddr3_valid_cl_to_cl(reg);

	/* Get target CWL value */
#ifdef MV88F67XX
	reg = reg_read(REG_DDR3_MR2_ADDR) >> REG_DDR3_MR2_CWL_OFFS;
#else
	reg = reg_read(REG_DDR3_MR2_CS_ADDR) >> REG_DDR3_MR2_CWL_OFFS;
#endif

	reg &= REG_DDR3_MR2_CWL_MASK;
	dram_info.cwl = reg;
#if !defined(MV88F67XX)
	/* A370 has no PBS mechanism */
#if defined(MV88F78X60)
	if ((dram_info.target_frequency > DDR_400) && (ddr3_run_pbs))
		first_loop_flag = 1;
#else
	/* first_loop_flag = 1; skip mid freq at ALP/A375 */
	if ((dram_info.target_frequency > DDR_400) && (ddr3_run_pbs) &&
	    (mv_ctrl_revision_get() >= UMC_A0))
		first_loop_flag = 1;
	else
		first_loop_flag = 0;
#endif
#endif

	freq = dram_info.target_frequency;

	/* Set ODT to always on */
	ddr3_odt_activate(1);

	/* Init XOR */
	mv_sys_xor_init(&dram_info);

	/* Get DRAM/HCLK ratio */
	if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
		ratio_2to1 = 1;

	/*
	 * Xor Bypass - ECC support in AXP is currently available for 1:1
	 * frequency modes.
	 * Not all frequency modes support the ddr3 training sequence
	 * (Only 1200/300).
	 * Xor Bypass allows using the Xor initializations and scrubbing
	 * inside the ddr3 training sequence without running the training
	 * itself.
	 */
	if (xor_bypass == 0) {
		if (ddr3_run_pbs) {
			DEBUG_MAIN_S("DDR3 Training Sequence - Run with PBS.\n");
		} else {
			DEBUG_MAIN_S("DDR3 Training Sequence - Run without PBS.\n");
		}

		if (dram_info.target_frequency > DFS_MARGIN) {
			tmp_ratio = 0;
			freq = DDR_100;

			if (dram_info.reg_dimm == 1)
				freq = DDR_300;

			if (MV_OK != ddr3_dfs_high_2_low(freq, &dram_info)) {
				/* Set low - 100MHz DDR frequency by HW */
				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs High2Low)\n");
				return MV_DDR3_TRAINING_ERR_DFS_H2L;
			}

			if ((dram_info.reg_dimm == 1) &&
			    (reg_dimm_skip_wl == 0)) {
				if (MV_OK !=
				    ddr3_write_leveling_hw_reg_dimm(freq,
								    &dram_info))
					DEBUG_MAIN_S("DDR3 Training Sequence - Registered DIMM Low WL - SKIP\n");
			}

			if (ddr3_get_log_level() >= MV_LOG_LEVEL_1)
				ddr3_print_freq(freq);

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 2\n");
		} else {
			if (!dqs_clk_aligned) {
#ifdef MV88F67XX
				/*
				 * If running the training sequence without
				 * DFS, we must run write leveling before
				 * writing the patterns
				 */

				/*
				 * ODT - Multi-CS systems use SW WL,
				 * single-CS systems use HW WL
				 */
				if (dram_info.cs_ena > 1) {
					if (MV_OK !=
					    ddr3_write_leveling_sw(
						    freq, tmp_ratio,
						    &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
					}
				} else {
					if (MV_OK !=
					    ddr3_write_leveling_hw(freq,
								   &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
#else
				if (MV_OK != ddr3_write_leveling_hw(
					    freq, &dram_info)) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
					if (ddr3_sw_wl_rl_debug) {
						if (MV_OK !=
						    ddr3_write_leveling_sw(
							    freq, tmp_ratio,
							    &dram_info)) {
							DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
							return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
						}
					} else {
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
#endif
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 3\n");
		}

		if (MV_OK != ddr3_load_patterns(&dram_info, 0)) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Loading Patterns)\n");
			return MV_DDR3_TRAINING_ERR_LOAD_PATTERNS;
		}

		/*
		 * TODO:
		 * The mainline U-Boot port of the bin_hdr DDR training code
		 * needs a minimum delay of 20ms here (10ms is a bit too short
		 * and the CPU hangs). The bin_hdr code doesn't have this delay.
		 * To be safe, let's use a delay of 50ms here.
		 *
		 * Tested on the Marvell DB-MV784MP-GP board
		 */
		mdelay(50);

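		/*
		 * Step the DFS frequency back up and re-run the training at
		 * each step until the target frequency is reached; on
		 * MV88F78X60 with PBS enabled and a target above DDR_400 the
		 * first iteration runs at DDR_400 (see first_loop_flag above).
		 */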
		do {
			freq = dram_info.target_frequency;
			tmp_ratio = ratio_2to1;
			DEBUG_MAIN_FULL_S("DDR3 Training Sequence - DEBUG - 4\n");

#if defined(MV88F78X60)
			/*
			 * There is a difference on the DFS frequency at the
			 * first iteration of this loop
			 */
			if (first_loop_flag) {
				freq = DDR_400;
				tmp_ratio = 0;
			}
#endif

			if (MV_OK != ddr3_dfs_low_2_high(freq, tmp_ratio,
							 &dram_info)) {
				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs Low2High)\n");
				return MV_DDR3_TRAINING_ERR_DFS_H2L;
			}

			if (ddr3_get_log_level() >= MV_LOG_LEVEL_1) {
				ddr3_print_freq(freq);
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 5\n");

			/* Write leveling */
			if (!dqs_clk_aligned) {
#ifdef MV88F67XX
				/*
				 * ODT - Multi-CS systems that do not support
				 * multi-CS MRS commands must use SW WL
				 */
				if (dram_info.cs_ena > 1) {
					if (MV_OK != ddr3_write_leveling_sw(
						    freq, tmp_ratio, &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
					}
				} else {
					if (MV_OK != ddr3_write_leveling_hw(
						    freq, &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
#else
				if ((dram_info.reg_dimm == 1) &&
				    (freq == DDR_400)) {
					if (reg_dimm_skip_wl == 0) {
						if (MV_OK != ddr3_write_leveling_hw_reg_dimm(
							    freq, &dram_info))
							DEBUG_MAIN_S("DDR3 Training Sequence - Registered DIMM WL - SKIP\n");
					}
				} else {
					if (MV_OK != ddr3_write_leveling_hw(
						    freq, &dram_info)) {
						DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
						if (ddr3_sw_wl_rl_debug) {
							if (MV_OK != ddr3_write_leveling_sw(
								    freq, tmp_ratio, &dram_info)) {
								DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Sw)\n");
								return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
							}
						} else {
							return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
						}
					}
				}
#endif
				if (debug_mode)
					DEBUG_MAIN_S
					    ("DDR3 Training Sequence - DEBUG - 6\n");
			}

			/* Read Leveling */
			/*
			 * Armada 370 - Support for HCLK @ 400MHZ - must use
			 * SW read leveling
			 */
			if (freq == DDR_400 && dram_info.rl400_bug) {
				status = ddr3_read_leveling_sw(freq, tmp_ratio,
							       &dram_info);
				if (MV_OK != status) {
					DEBUG_MAIN_S
					    ("DDR3 Training Sequence - FAILED (Read Leveling Sw)\n");
					return status;
				}
			} else {
				if (MV_OK != ddr3_read_leveling_hw(
					    freq, &dram_info)) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Hw)\n");
					if (ddr3_sw_wl_rl_debug) {
						if (MV_OK != ddr3_read_leveling_sw(
							    freq, tmp_ratio,
							    &dram_info)) {
							DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Sw)\n");
							return MV_DDR3_TRAINING_ERR_WR_LVL_SW;
						}
					} else {
						return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
					}
				}
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 7\n");

			if (MV_OK != ddr3_wl_supplement(&dram_info)) {
				DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hi-Freq Sup)\n");
				return MV_DDR3_TRAINING_ERR_WR_LVL_HI_FREQ;
			}

			if (debug_mode)
				DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 8\n");
#if !defined(MV88F67XX)
			/* A370 has no PBS mechanism */
#if defined(MV88F78X60) || defined(MV88F672X)
			if (first_loop_flag == 1) {
				first_loop_flag = 0;

				status = MV_OK;
				status = ddr3_pbs_rx(&dram_info);
				if (MV_OK != status) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (PBS RX)\n");
					return status;
				}

				if (debug_mode)
					DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 9\n");

				status = ddr3_pbs_tx(&dram_info);
				if (MV_OK != status) {
					DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (PBS TX)\n");
					return status;
				}

				if (debug_mode)
					DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 10\n");
			}
#endif
#endif
		} while (freq != dram_info.target_frequency);

		status = ddr3_dqs_centralization_rx(&dram_info);
		if (MV_OK != status) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (DQS Centralization RX)\n");
			return status;
		}

		if (debug_mode)
			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 11\n");

		status = ddr3_dqs_centralization_tx(&dram_info);
		if (MV_OK != status) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (DQS Centralization TX)\n");
			return status;
		}

		if (debug_mode)
			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 12\n");
	}

	ddr3_set_performance_params(&dram_info);

	if (dram_info.ecc_ena) {
		/* Need to SCRUB the DRAM memory area to load U-Boot */
		mv_sys_xor_finish();
		dram_info.num_cs = 1;
		dram_info.cs_ena = 1;
		mv_sys_xor_init(&dram_info);
		mv_xor_mem_init(0, scrub_offs, scrub_size, 0xdeadbeef,
				0xdeadbeef);

		/* Wait for previous transfer completion */
		while (mv_xor_state_get(0) != MV_IDLE)
			;

		if (debug_mode)
			DEBUG_MAIN_S("DDR3 Training Sequence - DEBUG - 13\n");
	}

	/* Return XOR State */
	mv_sys_xor_finish();

#if defined(MV88F78X60)
	/* Save training results in memory for resume state */
	ddr3_save_training(&dram_info);
#endif
	/* Clear ODT always on */
	ddr3_odt_activate(0);

	/* Configure Dynamic read ODT */
	ddr3_odt_read_dynamic_config(&dram_info);

	return MV_OK;
}

void ddr3_set_performance_params(MV_DRAM_INFO *dram_info)
{
	u32 twr2wr, trd2rd, trd2wr_wr2rd;
	u32 tmp1, tmp2, reg;

	DEBUG_MAIN_FULL_C("Max WL Phase: ", dram_info->wl_max_phase, 2);
	DEBUG_MAIN_FULL_C("Min WL Phase: ", dram_info->wl_min_phase, 2);
	DEBUG_MAIN_FULL_C("Max RL Phase: ", dram_info->rl_max_phase, 2);
	DEBUG_MAIN_FULL_C("Min RL Phase: ", dram_info->rl_min_phase, 2);

	if (dram_info->wl_max_phase < 2)
		twr2wr = 0x2;
	else
		twr2wr = 0x3;

	trd2rd = 0x1 + (dram_info->rl_max_phase + 1) / 2 +
		(dram_info->rl_max_phase + 1) % 2;

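	/*
	 * Read-to-write / write-to-read gap: take half of the worst-case
	 * phase difference between the read- and write-leveling results,
	 * rounded up (x / 2, plus one when there is a remainder).
	 */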
	tmp1 = (dram_info->rl_max_phase - dram_info->wl_min_phase) / 2 +
		(((dram_info->rl_max_phase - dram_info->wl_min_phase) % 2) >
		 0 ? 1 : 0);
	tmp2 = (dram_info->wl_max_phase - dram_info->rl_min_phase) / 2 +
		((dram_info->wl_max_phase - dram_info->rl_min_phase) % 2 >
		 0 ? 1 : 0);
	trd2wr_wr2rd = (tmp1 >= tmp2) ? tmp1 : tmp2;

	trd2wr_wr2rd += 2;
	trd2rd += 2;
	twr2wr += 2;

	DEBUG_MAIN_FULL_C("WR 2 WR: ", twr2wr, 2);
	DEBUG_MAIN_FULL_C("RD 2 RD: ", trd2rd, 2);
	DEBUG_MAIN_FULL_C("RD 2 WR / WR 2 RD: ", trd2wr_wr2rd, 2);

	reg = reg_read(REG_SDRAM_TIMING_HIGH_ADDR);

	reg &= ~(REG_SDRAM_TIMING_H_W2W_MASK << REG_SDRAM_TIMING_H_W2W_OFFS);
	reg |= ((twr2wr & REG_SDRAM_TIMING_H_W2W_MASK) <<
		REG_SDRAM_TIMING_H_W2W_OFFS);

	reg &= ~(REG_SDRAM_TIMING_H_R2R_MASK << REG_SDRAM_TIMING_H_R2R_OFFS);
	reg &= ~(REG_SDRAM_TIMING_H_R2R_H_MASK <<
		 REG_SDRAM_TIMING_H_R2R_H_OFFS);
	reg |= ((trd2rd & REG_SDRAM_TIMING_H_R2R_MASK) <<
		REG_SDRAM_TIMING_H_R2R_OFFS);
	reg |= (((trd2rd >> 2) & REG_SDRAM_TIMING_H_R2R_H_MASK) <<
		REG_SDRAM_TIMING_H_R2R_H_OFFS);

	reg &= ~(REG_SDRAM_TIMING_H_R2W_W2R_MASK <<
		 REG_SDRAM_TIMING_H_R2W_W2R_OFFS);
	reg &= ~(REG_SDRAM_TIMING_H_R2W_W2R_H_MASK <<
		 REG_SDRAM_TIMING_H_R2W_W2R_H_OFFS);
	reg |= ((trd2wr_wr2rd & REG_SDRAM_TIMING_H_R2W_W2R_MASK) <<
		REG_SDRAM_TIMING_H_R2W_W2R_OFFS);
	reg |= (((trd2wr_wr2rd >> 2) & REG_SDRAM_TIMING_H_R2W_W2R_H_MASK) <<
		REG_SDRAM_TIMING_H_R2W_W2R_H_OFFS);

	reg_write(REG_SDRAM_TIMING_HIGH_ADDR, reg);
}

/*
 * Perform DDR3 PUP Indirect Write
 */
void ddr3_write_pup_reg(u32 mode, u32 cs, u32 pup, u32 phase, u32 delay)
{
	u32 reg = 0;

	if (pup == PUP_BC)
		reg |= (1 << REG_PHY_BC_OFFS);
	else
		reg |= (pup << REG_PHY_PUP_OFFS);

	reg |= ((0x4 * cs + mode) << REG_PHY_CS_OFFS);
	reg |= (phase << REG_PHY_PHASE_OFFS) | delay;

	if (mode == PUP_WL_MODE)
		reg |= ((INIT_WL_DELAY + delay) << REG_PHY_DQS_REF_DLY_OFFS);

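	/*
	 * Two-step indirect access: first latch the assembled word in the
	 * PHY register file access register, then set the WR opcode bit and
	 * poll until the DONE bit reads back as 0.
	 */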
	reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */
	reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
	reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */

	do {
		reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
			REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
	} while (reg);	/* Wait for '0' to mark the end of the transaction */

	/* If read leveling mode - need to write to register 3 separately */
	if (mode == PUP_RL_MODE) {
		reg = 0;

		if (pup == PUP_BC)
			reg |= (1 << REG_PHY_BC_OFFS);
		else
			reg |= (pup << REG_PHY_PUP_OFFS);

		reg |= ((0x4 * cs + mode + 1) << REG_PHY_CS_OFFS);
		reg |= (INIT_RL_DELAY);

		reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */
		reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
		reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */

		do {
			reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
				REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
		} while (reg);
	}
}

/*
 * Perform DDR3 PUP Indirect Read
 */
u32 ddr3_read_pup_reg(u32 mode, u32 cs, u32 pup)
{
	u32 reg;

	reg = (pup << REG_PHY_PUP_OFFS) |
		((0x4 * cs + mode) << REG_PHY_CS_OFFS);
	reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */

	reg |= REG_PHY_REGISTRY_FILE_ACCESS_OP_RD;
	reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, reg);	/* 0x16A0 */

	do {
		reg = reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR) &
			REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
	} while (reg);	/* Wait for '0' to mark the end of the transaction */

	return reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR);	/* 0x16A0 */
}

/*
 * Set training patterns
 */
int ddr3_load_patterns(MV_DRAM_INFO *dram_info, int resume)
{
	u32 reg;

	/* Enable SW override - Required for the ECC Pup */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR) |
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);

	/* [0] = 1 - Enable SW override */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	reg = (1 << REG_DRAM_TRAINING_AUTO_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	if (resume == 0) {
#if defined(MV88F78X60) || defined(MV88F672X)
		ddr3_load_pbs_patterns(dram_info);
#endif
		ddr3_load_dqs_patterns(dram_info);
	}

	/* Disable SW override - Must be in a different stage */
	/* [0] = 0 - Disable SW override */
	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg &= ~(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS);
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	reg = reg_read(REG_DRAM_TRAINING_1_ADDR) |
		(1 << REG_DRAM_TRAINING_1_TRNBPOINT_OFFS);
	reg_write(REG_DRAM_TRAINING_1_ADDR, reg);

	/* Set Base Addr */
#if defined(MV88F67XX)
	reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR, 0);
#else
	if (resume == 0)
		reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR, 0);
	else
		reg_write(REG_DRAM_TRAINING_PATTERN_BASE_ADDR,
			  RESUME_RL_PATTERNS_ADDR);
#endif

	/* Set Patterns */
	if (resume == 0) {
		reg = (dram_info->cs_ena << REG_DRAM_TRAINING_CS_OFFS) |
			(1 << REG_DRAM_TRAINING_PATTERNS_OFFS);
	} else {
		reg = (0x1 << REG_DRAM_TRAINING_CS_OFFS) |
			(1 << REG_DRAM_TRAINING_PATTERNS_OFFS);
	}

	reg |= (1 << REG_DRAM_TRAINING_AUTO_OFFS);

	reg_write(REG_DRAM_TRAINING_ADDR, reg);

	udelay(100);

	/* Check if Successful */
	if (reg_read(REG_DRAM_TRAINING_ADDR) &
	    (1 << REG_DRAM_TRAINING_ERROR_OFFS))
		return MV_OK;
	else
		return MV_FAIL;
}

#if !defined(MV88F67XX)
/*
 * Name:     ddr3_save_training(MV_DRAM_INFO *dram_info)
 * Desc:     saves the training results to memory (RL, WL, PBS, Rx/Tx
 *           Centralization)
 * Args:     MV_DRAM_INFO *dram_info
 * Notes:
 * Returns:  None.
 */
void ddr3_save_training(MV_DRAM_INFO *dram_info)
{
	u32 val, pup, tmp_cs, cs, i, dq;
	u32 crc = 0;
	u32 regs = 0;
	u32 *sdram_offset = (u32 *)RESUME_TRAINING_VALUES_ADDR;
	u32 mode_config[MAX_TRAINING_MODE];

	mode_config[DQS_WR_MODE] = PUP_DQS_WR;
	mode_config[WL_MODE_] = PUP_WL_MODE;
	mode_config[RL_MODE_] = PUP_RL_MODE;
	mode_config[DQS_RD_MODE] = PUP_DQS_RD;
	mode_config[PBS_TX_DM_MODE] = PUP_PBS_TX_DM;
	mode_config[PBS_TX_MODE] = PUP_PBS_TX;
	mode_config[PBS_RX_MODE] = PUP_PBS_RX;

	/* num of training modes */
	for (i = 0; i < MAX_TRAINING_MODE; i++) {
		tmp_cs = dram_info->cs_ena;
		/* num of CS */
		for (cs = 0; cs < MAX_CS; cs++) {
			if (tmp_cs & (1 << cs)) {
				/* num of PUPs */
				for (pup = 0; pup < dram_info->num_of_total_pups;
				     pup++) {
					if (pup == dram_info->num_of_std_pups &&
					    dram_info->ecc_ena)
						pup = ECC_PUP;
					if (i == PBS_TX_DM_MODE) {
						/*
						 * Change CS bitmask because
						 * PBS works only with CS0
						 */
						tmp_cs = 0x1;
						val = ddr3_read_pup_reg(
							mode_config[i], CS0, pup);
					} else if (i == PBS_TX_MODE ||
						   i == PBS_RX_MODE) {
						/*
						 * Change CS bitmask because
						 * PBS works only with CS0
						 */
						tmp_cs = 0x1;
						for (dq = 0; dq <= DQ_NUM;
						     dq++) {
							val = ddr3_read_pup_reg(
								mode_config[i] + dq,
								CS0,
								pup);
							(*sdram_offset) = val;
							crc += *sdram_offset;
							sdram_offset++;
							regs++;
						}
						continue;
					} else {
						val = ddr3_read_pup_reg(
							mode_config[i], cs, pup);
					}

					*sdram_offset = val;
					crc += *sdram_offset;
					sdram_offset++;
					regs++;
				}
			}
		}
	}

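	/*
	 * Append the two Dunit read sample/ready delay registers, then store
	 * the number of saved words at NUM_OF_REGISTER_ADDR and the additive
	 * checksum at CHECKSUM_RESULT_ADDR so ddr3_read_training_results()
	 * can validate the data on resume.
	 */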
	*sdram_offset = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
	crc += *sdram_offset;
	sdram_offset++;
	regs++;
	*sdram_offset = reg_read(REG_READ_DATA_READY_DELAYS_ADDR);
	crc += *sdram_offset;
	sdram_offset++;
	regs++;
	sdram_offset = (u32 *)NUM_OF_REGISTER_ADDR;
	*sdram_offset = regs;
	DEBUG_SUSPEND_RESUME_S("Training Results CheckSum write= ");
	DEBUG_SUSPEND_RESUME_D(crc, 8);
	DEBUG_SUSPEND_RESUME_S("\n");
	sdram_offset = (u32 *)CHECKSUM_RESULT_ADDR;
	*sdram_offset = crc;
}

/*
 * Name:     ddr3_read_training_results()
 * Desc:     Reads the training results from memory (RL, WL, PBS, Rx/Tx
 *           Centralization)
 *           and writes them to the relevant registers
 * Args:     None.
 * Notes:
 * Returns:  MV_OK on success, MV_FAIL on checksum mismatch.
 */
int ddr3_read_training_results(void)
{
	u32 val, reg, idx, dqs_wr_idx = 0, crc = 0;
	u32 *sdram_offset = (u32 *)RESUME_TRAINING_VALUES_ADDR;
	u32 training_val[RESUME_TRAINING_VALUES_MAX] = { 0 };
	u32 regs = *((u32 *)NUM_OF_REGISTER_ADDR);

	/*
	 * Read the training results & Dunit registers from memory and write
	 * them to an array
	 */
	for (idx = 0; idx < regs; idx++) {
		training_val[idx] = *sdram_offset;
		crc += *sdram_offset;
		sdram_offset++;
	}

	sdram_offset = (u32 *)CHECKSUM_RESULT_ADDR;

	if ((*sdram_offset) == crc) {
		DEBUG_SUSPEND_RESUME_S("Training Results CheckSum read PASS= ");
		DEBUG_SUSPEND_RESUME_D(crc, 8);
		DEBUG_SUSPEND_RESUME_S("\n");
	} else {
		DEBUG_MAIN_S("Wrong Training Results CheckSum\n");
		return MV_FAIL;
	}

	/*
	 * We iterate through all the registers except for the last 2 since
	 * they are Dunit registers (and not PHY registers)
	 */
	for (idx = 0; idx < (regs - 2); idx++) {
		val = training_val[idx];
		reg = (val >> REG_PHY_CS_OFFS) & 0x3F;	/* read the PHY address */

		/* Check if the values belong to the DQS WR */
		if (reg == PUP_WL_MODE) {
			/* bits [5:0] in DQS_WR are the delay */
			val = (training_val[dqs_wr_idx++] & 0x3F);
			/*
			 * bits [15:10] are the DQS_WR delay & bits [9:0] are
			 * the WL phase & delay
			 */
			val = (val << REG_PHY_DQS_REF_DLY_OFFS) |
				(training_val[idx] & 0x3C003FF);
			/* Add Request pending and write operation bits */
			val |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
		} else if (reg == PUP_DQS_WR) {
			/*
			 * Do nothing since DQS_WR will be done in PUP_WL_MODE
			 */
			continue;
		}

		val |= REG_PHY_REGISTRY_FILE_ACCESS_OP_WR;
		reg_write(REG_PHY_REGISTRY_FILE_ACCESS_ADDR, val);
		do {
			val = (reg_read(REG_PHY_REGISTRY_FILE_ACCESS_ADDR)) &
				REG_PHY_REGISTRY_FILE_ACCESS_OP_DONE;
		} while (val);	/* Wait for '0' to mark the end of the transaction */
	}

	/* Write the last 2 Dunit configurations */
	val = training_val[idx];
	reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, val);	/* reg 0x1538 */
	val = training_val[idx + 1];
	reg_write(REG_READ_DATA_READY_DELAYS_ADDR, val);	/* reg 0x153c */

	return MV_OK;
}

/*
 * Name:     ddr3_check_if_resume_mode()
 * Desc:     Reads the address (0x3000) of the Resume Magic word (0xDEADB002)
 * Args:     MV_DRAM_INFO *dram_info
 * Notes:
 * Returns:  return (magic_word == SUSPEND_MAGIC_WORD)
 */
int ddr3_check_if_resume_mode(MV_DRAM_INFO *dram_info, u32 freq)
{
	u32 magic_word;
	u32 *sdram_offset = (u32 *)BOOT_INFO_ADDR;

	if (dram_info->reg_dimm != 1) {
		/*
		 * Perform write leveling in order to initiate the PHY
		 * at a low frequency
		 */
		if (MV_OK != ddr3_write_leveling_hw(freq, dram_info)) {
			DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Write Leveling Hw)\n");
			return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
		}
	}

	if (MV_OK != ddr3_load_patterns(dram_info, 1)) {
		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Loading Patterns)\n");
		return MV_DDR3_TRAINING_ERR_LOAD_PATTERNS;
	}

	/* Enable CS0 only for RL */
	dram_info->cs_ena = 0x1;

	/* Perform read leveling in order to get stable memory */
	if (MV_OK != ddr3_read_leveling_hw(freq, dram_info)) {
		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Read Leveling Hw)\n");
		return MV_DDR3_TRAINING_ERR_WR_LVL_HW;
	}

	/* Back to relevant CS */
	dram_info->cs_ena = ddr3_get_cs_ena_from_reg();

	magic_word = *sdram_offset;
	return magic_word == SUSPEND_MAGIC_WORD;
}

/*
 * Name:     ddr3_training_suspend_resume()
 * Desc:     Execute the Resume state
 * Args:     MV_DRAM_INFO *dram_info
 * Notes:
 * Returns:  MV_OK on success, otherwise an error code.
 */
int ddr3_training_suspend_resume(MV_DRAM_INFO *dram_info)
{
	u32 freq, reg;
	int tmp_ratio;

	/* Configure DDR */
	if (MV_OK != ddr3_read_training_results())
		return MV_FAIL;

	/* Reset read FIFO */
	reg = reg_read(REG_DRAM_TRAINING_ADDR);

	/* Start Auto Read Leveling procedure */
	reg |= (1 << REG_DRAM_TRAINING_RL_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	reg = reg_read(REG_DRAM_TRAINING_2_ADDR);
	reg |= ((1 << REG_DRAM_TRAINING_2_FIFO_RST_OFFS) +
		(1 << REG_DRAM_TRAINING_2_SW_OVRD_OFFS));

	/* [0] = 1 - Enable SW override, [4] = 1 - FIFO reset */
	/* 0x15B8 - Training SW 2 Register */
	reg_write(REG_DRAM_TRAINING_2_ADDR, reg);

	udelay(2);

	reg = reg_read(REG_DRAM_TRAINING_ADDR);
	/* Clear Auto Read Leveling procedure */
	reg &= ~(1 << REG_DRAM_TRAINING_RL_OFFS);
	reg_write(REG_DRAM_TRAINING_ADDR, reg);	/* 0x15B0 - Training Register */

	/* Return to target frequency */
	freq = dram_info->target_frequency;
	tmp_ratio = 1;
	if (MV_OK != ddr3_dfs_low_2_high(freq, tmp_ratio, dram_info)) {
		DEBUG_MAIN_S("DDR3 Training Sequence - FAILED (Dfs Low2High)\n");
		return MV_DDR3_TRAINING_ERR_DFS_H2L;
	}

	if (dram_info->ecc_ena) {
		/* Scrub the RL pattern area and the training area */
		mv_sys_xor_finish();
		dram_info->num_cs = 1;
		dram_info->cs_ena = 1;
		mv_sys_xor_init(dram_info);
		mv_xor_mem_init(0, RESUME_RL_PATTERNS_ADDR,
				RESUME_RL_PATTERNS_SIZE, 0xFFFFFFFF, 0xFFFFFFFF);

		/* Wait for previous transfer completion */
		while (mv_xor_state_get(0) != MV_IDLE)
			;

		/* Return XOR State */
		mv_sys_xor_finish();
	}

	return MV_OK;
}
#endif

void ddr3_print_freq(u32 freq)
{
	u32 tmp_freq;

	switch (freq) {
	case 0:
		tmp_freq = 100;
		break;
	case 1:
		tmp_freq = 300;
		break;
	case 2:
		tmp_freq = 360;
		break;
	case 3:
		tmp_freq = 400;
		break;
	case 4:
		tmp_freq = 444;
		break;
	case 5:
		tmp_freq = 500;
		break;
	case 6:
		tmp_freq = 533;
		break;
	case 7:
		tmp_freq = 600;
		break;
	case 8:
		tmp_freq = 666;
		break;
	case 9:
		tmp_freq = 720;
		break;
	case 10:
		tmp_freq = 800;
		break;
	default:
		tmp_freq = 100;
	}

	printf("Current frequency is: %dMHz\n", tmp_freq);
}

int ddr3_get_min_max_read_sample_delay(u32 cs_enable, u32 reg, u32 *min,
				       u32 *max, u32 *cs_max)
{
	u32 cs, delay;

	*min = 0xFFFFFFFF;
	*max = 0x0;

	for (cs = 0; cs < MAX_CS; cs++) {
		if ((cs_enable & (1 << cs)) == 0)
			continue;

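		/*
		 * Each chip select occupies an 8-bit field in the read data
		 * sample delays register; the low 5 bits hold the delay.
		 */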
		delay = ((reg >> (cs * 8)) & 0x1F);

		if (delay < *min)
			*min = delay;

		if (delay > *max) {
			*max = delay;
			*cs_max = cs;
		}
	}

	return MV_OK;
}

int ddr3_get_min_max_rl_phase(MV_DRAM_INFO *dram_info, u32 *min, u32 *max,
			      u32 cs)
{
	u32 pup, reg, phase;

	*min = 0xFFFFFFFF;
	*max = 0x0;

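	/* The read leveling phase sits in bits [10:8] of each PUP's RL register */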
	for (pup = 0; pup < dram_info->num_of_total_pups; pup++) {
		reg = ddr3_read_pup_reg(PUP_RL_MODE, cs, pup);
		phase = ((reg >> 8) & 0x7);

		if (phase < *min)
			*min = phase;

		if (phase > *max)
			*max = phase;
	}

	return MV_OK;
}

int ddr3_odt_activate(int activate)
{
	u32 reg, mask;

	mask = (1 << REG_DUNIT_ODT_CTRL_OVRD_OFFS) |
		(1 << REG_DUNIT_ODT_CTRL_OVRD_VAL_OFFS);
	/* {0x0000149C} - DDR Dunit ODT Control Register */
	reg = reg_read(REG_DUNIT_ODT_CTRL_ADDR);
	if (activate)
		reg |= mask;
	else
		reg &= ~mask;

	reg_write(REG_DUNIT_ODT_CTRL_ADDR, reg);

	return MV_OK;
}

int ddr3_odt_read_dynamic_config(MV_DRAM_INFO *dram_info)
{
	u32 min_read_sample_delay, max_read_sample_delay, max_rl_phase;
	u32 min, max, cs_max;
	u32 cs_ena, reg;

	reg = reg_read(REG_READ_DATA_SAMPLE_DELAYS_ADDR);
	cs_ena = ddr3_get_cs_ena_from_reg();

	/* Get minimum and maximum of read sample delay of all CS */
	ddr3_get_min_max_read_sample_delay(cs_ena, reg, &min_read_sample_delay,
					   &max_read_sample_delay, &cs_max);

	/*
	 * Get minimum and maximum read leveling phase which belongs to the
	 * maximal read sample delay
	 */
	ddr3_get_min_max_rl_phase(dram_info, &min, &max, cs_max);
	max_rl_phase = max;

	/* DDR ODT Timing (Low) Register calculation */
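	/*
	 * The read ODT-on field is programmed to min_read_sample_delay - 1;
	 * the ODT-off field to max_read_sample_delay plus a margin derived
	 * from the read leveling phase.
	 */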
	reg = reg_read(REG_ODT_TIME_LOW_ADDR);
	reg &= ~(0x1FF << REG_ODT_ON_CTL_RD_OFFS);
	reg |= (((min_read_sample_delay - 1) & 0xF) << REG_ODT_ON_CTL_RD_OFFS);
	reg |= (((max_read_sample_delay + 4 + (((max_rl_phase + 1) / 2) + 1)) &
		 0x1F) << REG_ODT_OFF_CTL_RD_OFFS);
	reg_write(REG_ODT_TIME_LOW_ADDR, reg);

	return MV_OK;
}