// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include <common.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

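/*
 * ONE_CLOCK_ERROR_SHIFT and ALIGN_ERROR_SHIFT below are pattern-word
 * offsets fed to the XSB compare test: +2 words for a one-clock write
 * leveling error, -2 words for an alignment error (see
 * ddr3_tip_wl_supp_align_phase_shift()).
 */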
#define WL_ITERATION_NUM 10
#define ONE_CLOCK_ERROR_SHIFT 2
#define ALIGN_ERROR_SHIFT -2

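/*
 * Byte-lane masks: pup_mask_table[pup] selects the byte lane driven by
 * that pup within a 32-bit word read back from SDRAM.
 */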
static u32 pup_mask_table[] = {
	0x000000ff,
	0x0000ff00,
	0x00ff0000,
	0xff000000
};

static struct write_supp_result wr_supp_res[MAX_INTERFACE_NUM][MAX_BUS_NUM];

static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num);
static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num);
static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num);
static int ddr3_tip_wl_supp_align_err_shift(u32 dev_num, u32 if_id, u32 bus_id,
					    u32 bus_id_delta);
static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id,
					      u32 bus_id, u32 offset,
					      u32 bus_id_delta);
static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id,
				     int edge_offset, u32 bus_id_delta);
static int ddr3_tip_wl_supp_one_clk_err_shift(u32 dev_num, u32 if_id,
					      u32 bus_id, u32 bus_id_delta);

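/*
 * Count the chip selects marked active in the topology map; the result
 * is cached in a function-local static since the topology does not
 * change at run time.
 */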
u32 hws_ddr3_tip_max_cs_get(void)
{
	u32 c_cs;
	static u32 max_cs;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (!max_cs) {
		for (c_cs = 0; c_cs < NUM_OF_CS; c_cs++) {
			VALIDATE_ACTIVE(tm->
					interface_params[0].as_bus_params[0].
					cs_bitmask, c_cs);
			max_cs++;
		}
	}

	return max_cs;
}

/*****************************************************************************
Dynamic read leveling
******************************************************************************/
int ddr3_tip_dynamic_read_leveling(u32 dev_num, u32 freq)
{
	u32 data, mask;
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	u32 bus_num, if_id, cl_val;
	enum hws_speed_bin speed_bin_index;
	/* save current CS value */
	u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 };
	int is_any_pup_fail = 0;
	u32 data_read[MAX_INTERFACE_NUM + 1] = { 0 };
	u8 rl_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM];
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (rl_version == 0) {
		/* OLD RL machine */
		data = 0x40;
		data |= (1 << 20);

		/* TBD multi CS */
		CHECK_STATUS(ddr3_tip_if_write(
			dev_num, ACCESS_TYPE_MULTICAST,
			PARAM_NOT_CARE, TRAINING_REG,
			data, 0x11ffff));
		CHECK_STATUS(ddr3_tip_if_write(
			dev_num, ACCESS_TYPE_MULTICAST,
			PARAM_NOT_CARE,
			TRAINING_PATTERN_BASE_ADDRESS_REG,
			0, 0xfffffff8));
		CHECK_STATUS(ddr3_tip_if_write(
			dev_num, ACCESS_TYPE_MULTICAST,
			PARAM_NOT_CARE, TRAINING_REG,
			(u32)(1 << 31), (u32)(1 << 31)));

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			training_result[training_stage][if_id] = TEST_SUCCESS;
			if (ddr3_tip_if_polling
			    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
			     (u32)(1 << 31), TRAINING_REG,
			     MAX_POLLING_ITERATIONS) != MV_OK) {
				DEBUG_LEVELING(
					DEBUG_LEVEL_ERROR,
					("RL: DDR3 poll failed(1) IF %d\n",
					 if_id));
				training_result[training_stage][if_id] =
					TEST_FAILED;

				if (debug_mode == 0)
					return MV_FAIL;
			}
		}

		/* read read-leveling result */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_REG, data_read, 1 << 30));
		/* exit read leveling mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, 0x8, 0x9));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_1_REG, 1 << 16, 1 << 16));

		/* disable RL machine on all Trn_CS[3:0], [16:0] */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_REG, 0, 0xf1ffff));

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			if ((data_read[if_id] & (1 << 30)) == 0) {
				DEBUG_LEVELING(
					DEBUG_LEVEL_ERROR,
					("\n_read Leveling failed for IF %d\n",
					 if_id));
				training_result[training_stage][if_id] =
					TEST_FAILED;
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}
		return MV_OK;
	}

	/* NEW RL machine */
	for (effective_cs = 0; effective_cs < NUM_OF_CS; effective_cs++)
		for (bus_num = 0; bus_num < MAX_BUS_NUM; bus_num++)
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++)
				rl_values[effective_cs][bus_num][if_id] = 0;

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			training_result[training_stage][if_id] = TEST_SUCCESS;

			/* save current cs enable reg val */
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      CS_ENABLE_REG, cs_enable_reg_val,
				      MASK_ALL_BITS));
			/* enable single cs */
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      CS_ENABLE_REG, (1 << 3), (1 << 3)));
		}

		ddr3_tip_reset_fifo_ptr(dev_num);

		/*
		 * Phase 1: Load pattern (using ODPG)
		 *
		 * enter Read Leveling mode
		 * only 27 bits are masked
		 * assuming non multi-CS configuration
		 * write to CS = 0 for the non multi CS configuration, note
		 * that the results shall be read back to the required CS !!!
		 */

		/* BUS count is 0 shifted 26 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CONTROL_REG, 0x3, 0x3));
		CHECK_STATUS(ddr3_tip_configure_odpg
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0,
			      pattern_table[PATTERN_RL].num_of_phases_tx, 0,
			      pattern_table[PATTERN_RL].num_of_phases_rx, 0, 0,
			      effective_cs, STRESS_NONE, DURATION_SINGLE));

		/* load pattern to ODPG */
		ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE, PATTERN_RL,
					      pattern_table[PATTERN_RL].
					      start_addr);

		/*
		 * Phase 2: ODPG to Read Leveling mode
		 */

		/* General Training Opcode register */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_WRITE_READ_MODE_ENABLE_REG, 0,
			      MASK_ALL_BITS));

		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_TRAINING_CONTROL_REG,
			      (0x301b01 | effective_cs << 2), 0x3c3fef));

		/* Object1 opcode register 0 & 1 */
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			speed_bin_index =
				tm->interface_params[if_id].speed_bin_index;
			cl_val =
				cas_latency_table[speed_bin_index].cl_val[freq];
			data = (cl_val << 17) | (0x3 << 25);
			mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25);
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ODPG_OBJ1_OPCODE_REG, data, mask));
		}

		/* Set iteration count to max value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_OPCODE_1_REG, 0xd00, 0xd00));

		/*
		 * Phase 2: Mask config
		 */

		ddr3_tip_dynamic_read_leveling_seq(dev_num);

		/*
		 * Phase 3: Read Leveling execution
		 */

		/* temporary jira dunit=14751 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_DBG_1_REG, 0, (u32)(1 << 31)));
		/* configure phy reset value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_DBG_3_REG, (0x7f << 24),
			      (u32)(0xff << 24)));
		/* data pup rd reset enable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_CONFIGURATION_REG, 0, (1 << 30)));
		/* data pup rd reset disable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_CONFIGURATION_REG, (1 << 30), (1 << 30)));
		/* training SW override & training RL mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, 0x1, 0x9));
		/* training enable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_REG, (1 << 24) | (1 << 20),
			      (1 << 24) | (1 << 20)));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31)));

		/********* trigger training *******************/
		/* Trigger, poll on status and disable ODPG */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_TRAINING_TRIGGER_REG, 0x1, 0x1));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_TRAINING_STATUS_REG, 0x1, 0x1));

		/* check for training done + results pass */
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x2, 0x2,
		     ODPG_TRAINING_STATUS_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("Training Done Failed\n"));
			return MV_FAIL;
		}

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id,
				      ODPG_TRAINING_TRIGGER_REG, data_read,
				      0x4));
			data = data_read[if_id];
			if (data != 0x0) {
				DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
					       ("Training Result Failed\n"));
			}
		}

		/* disable ODPG - back to functional mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_ENABLE_REG, 0x1 << ODPG_DISABLE_OFFS,
			      (0x1 << ODPG_DISABLE_OFFS)));
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x0, 0x1,
		     ODPG_ENABLE_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("ODPG disable failed "));
			return MV_FAIL;
		}
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));

		/* double loop on bus, pup */
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			/* check training done */
			is_any_pup_fail = 0;
			for (bus_num = 0;
			     bus_num < tm->num_of_bus_per_interface;
			     bus_num++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
				if (ddr3_tip_if_polling
				    (dev_num, ACCESS_TYPE_UNICAST,
				     if_id, (1 << 25), (1 << 25),
				     mask_results_pup_reg_map[bus_num],
				     MAX_POLLING_ITERATIONS) != MV_OK) {
					DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
						       ("\n_r_l: DDR3 poll failed(2) for bus %d",
							bus_num));
					is_any_pup_fail = 1;
				} else {
					/* read result per pup */
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      mask_results_pup_reg_map
						      [bus_num], data_read,
						      0xff));
					rl_values[effective_cs][bus_num]
						[if_id] = (u8)data_read[if_id];
				}
			}

			if (is_any_pup_fail == 1) {
				training_result[training_stage][if_id] =
					TEST_FAILED;
				if (debug_mode == 0)
					return MV_FAIL;
			}
		}

		DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n"));

		/*
		 * Phase 3: Exit Read Leveling
		 */

		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, (1 << 3), (1 << 3)));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_1_REG, (1 << 16), (1 << 16)));
		/* set ODPG to functional */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));

		/*
		 * Copy the result from the effective CS search to the
		 * real Functional CS
		 */
		/*ddr3_tip_write_cs_result(dev_num, RL_PHY_REG); */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		/* double loop on bus, pup */
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			for (bus_num = 0;
			     bus_num < tm->num_of_bus_per_interface;
			     bus_num++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
				/* read result per pup from array */
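				/*
				 * Result byte layout: bits [4:0] carry the
				 * ADLL value and bits [7:5] the phase; the
				 * RL PHY register keeps the phase in bits
				 * [8:6], hence the re-pack below.
				 */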
				data = rl_values[effective_cs][bus_num][if_id];
				data = (data & 0x1f) |
					(((data & 0xe0) >> 5) << 6);
				ddr3_tip_bus_write(dev_num,
						   ACCESS_TYPE_UNICAST,
						   if_id,
						   ACCESS_TYPE_UNICAST,
						   bus_num, DDR_PHY_DATA,
						   RL_PHY_REG +
						   ((effective_cs ==
						     0) ? 0x0 : 0x4), data);
			}
		}
	}
	/* Set to 0 after each loop to avoid an illegal value being used */
	effective_cs = 0;

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		/* restore cs enable value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
		if (odt_config != 0) {
			CHECK_STATUS(ddr3_tip_write_additional_odt_setting
				     (dev_num, if_id));
		}
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		if (training_result[training_stage][if_id] == TEST_FAILED)
			return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Legacy Dynamic write leveling
 */
int ddr3_tip_legacy_dynamic_write_leveling(u32 dev_num)
{
	u32 c_cs, if_id, cs_mask = 0;
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * In TRAINING reg (0x15b0) write 0x80000008 | cs_mask:
	 * Trn_start
	 * cs_mask = 0x1 << 20 Trn_CS0 - CS0 is included in the DDR3 training
	 * cs_mask = 0x1 << 21 Trn_CS1 - CS1 is included in the DDR3 training
	 * cs_mask = 0x1 << 22 Trn_CS2 - CS2 is included in the DDR3 training
	 * cs_mask = 0x1 << 23 Trn_CS3 - CS3 is included in the DDR3 training
	 * Trn_auto_seq = write leveling
	 */
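	/* e.g. max_cs == 2 yields cs_mask = (1 << 20) | (1 << 21) */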
	for (c_cs = 0; c_cs < max_cs; c_cs++)
		cs_mask = cs_mask | 1 << (20 + c_cs);

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, 0,
			      TRAINING_REG, (0x80000008 | cs_mask),
			      0xffffffff));
		mdelay(20);
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
		     (u32)0x80000000, TRAINING_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("polling failed for Old WL result\n"));
			return MV_FAIL;
		}
	}

	return MV_OK;
}

/*
 * Legacy Dynamic read leveling
 */
int ddr3_tip_legacy_dynamic_read_leveling(u32 dev_num)
{
	u32 c_cs, if_id, cs_mask = 0;
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * In TRAINING reg (0x15b0) write 0x80000040 | cs_mask:
	 * Trn_start
	 * cs_mask = 0x1 << 20 Trn_CS0 - CS0 is included in the DDR3 training
	 * cs_mask = 0x1 << 21 Trn_CS1 - CS1 is included in the DDR3 training
	 * cs_mask = 0x1 << 22 Trn_CS2 - CS2 is included in the DDR3 training
	 * cs_mask = 0x1 << 23 Trn_CS3 - CS3 is included in the DDR3 training
	 * Trn_auto_seq = Read Leveling using training pattern
	 */
	for (c_cs = 0; c_cs < max_cs; c_cs++)
		cs_mask = cs_mask | 1 << (20 + c_cs);

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, 0, TRAINING_REG,
		      (0x80000040 | cs_mask), 0xffffffff));
	mdelay(100);

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
		     (u32)0x80000000, TRAINING_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("polling failed for Old RL result\n"));
			return MV_FAIL;
		}
	}

	return MV_OK;
}

/*
 * Dynamic per bit read leveling
 */
int ddr3_tip_dynamic_per_bit_read_leveling(u32 dev_num, u32 freq)
{
	u32 data, mask;
	u32 bus_num, if_id, cl_val, bit_num;
	u32 curr_numb, curr_min_delay;
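	/*
	 * ADLL offsets, relative to the saved phyreg3 value, retried on
	 * pups that have not yet passed
	 */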
	int adll_array[3] = { 0, -0xa, 0x14 };
	u32 phyreg3_arr[MAX_INTERFACE_NUM][MAX_BUS_NUM];
	enum hws_speed_bin speed_bin_index;
	int is_any_pup_fail = 0;
	int break_loop = 0;
	u32 cs_enable_reg_val[MAX_INTERFACE_NUM]; /* save current CS value */
	u32 data_read[MAX_INTERFACE_NUM];
	int per_bit_rl_pup_status[MAX_INTERFACE_NUM][MAX_BUS_NUM];
	u32 data2_write[MAX_INTERFACE_NUM][MAX_BUS_NUM];
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_num = 0;
		     bus_num < tm->num_of_bus_per_interface; bus_num++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
			per_bit_rl_pup_status[if_id][bus_num] = 0;
			data2_write[if_id][bus_num] = 0;
			/* read current value of phy register 0x3 */
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_num, DDR_PHY_DATA,
				      READ_CENTRALIZATION_PHY_REG,
				      &phyreg3_arr[if_id][bus_num]));
		}
	}

	/* NEW RL machine */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		training_result[training_stage][if_id] = TEST_SUCCESS;

		/* save current cs enable reg val */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, &cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
		/* enable single cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, (1 << 3), (1 << 3)));
	}

	ddr3_tip_reset_fifo_ptr(dev_num);
	for (curr_numb = 0; curr_numb < 3; curr_numb++) {
		/*
		 * Phase 1: Load pattern (using ODPG)
		 *
		 * enter Read Leveling mode
		 * only 27 bits are masked
		 * assuming non multi-CS configuration
		 * write to CS = 0 for the non multi CS configuration, note that
		 * the results shall be read back to the required CS !!!
		 */

		/* BUS count is 0 shifted 26 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CONTROL_REG, 0x3, 0x3));
		CHECK_STATUS(ddr3_tip_configure_odpg
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0,
			      pattern_table[PATTERN_TEST].num_of_phases_tx, 0,
			      pattern_table[PATTERN_TEST].num_of_phases_rx, 0,
			      0, 0, STRESS_NONE, DURATION_SINGLE));

		/* load pattern to ODPG */
		ddr3_tip_load_pattern_to_odpg(dev_num, ACCESS_TYPE_MULTICAST,
					      PARAM_NOT_CARE, PATTERN_TEST,
					      pattern_table[PATTERN_TEST].
					      start_addr);

		/*
		 * Phase 2: ODPG to Read Leveling mode
		 */

		/* General Training Opcode register */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_WRITE_READ_MODE_ENABLE_REG, 0,
			      MASK_ALL_BITS));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_TRAINING_CONTROL_REG, 0x301b01, 0x3c3fef));

		/* Object1 opcode register 0 & 1 */
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			speed_bin_index =
				tm->interface_params[if_id].speed_bin_index;
			cl_val =
				cas_latency_table[speed_bin_index].cl_val[freq];
			data = (cl_val << 17) | (0x3 << 25);
			mask = (0xff << 9) | (0x1f << 17) | (0x3 << 25);
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ODPG_OBJ1_OPCODE_REG, data, mask));
		}

		/* Set iteration count to max value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_OPCODE_1_REG, 0xd00, 0xd00));

		/*
		 * Phase 2: Mask config
		 */

		ddr3_tip_dynamic_per_bit_read_leveling_seq(dev_num);

		/*
		 * Phase 3: Read Leveling execution
		 */

		/* temporary jira dunit=14751 */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_DBG_1_REG, 0, (u32)(1 << 31)));
		/* configure phy reset value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_DBG_3_REG, (0x7f << 24),
			      (u32)(0xff << 24)));
		/* data pup rd reset enable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_CONFIGURATION_REG, 0, (1 << 30)));
		/* data pup rd reset disable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      SDRAM_CONFIGURATION_REG, (1 << 30), (1 << 30)));
		/* training SW override & training RL mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, 0x1, 0x9));
		/* training enable */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_REG, (1 << 24) | (1 << 20),
			      (1 << 24) | (1 << 20)));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_REG, (u32)(1 << 31), (u32)(1 << 31)));

		/********* trigger training *******************/
		/* Trigger, poll on status and disable ODPG */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_TRAINING_TRIGGER_REG, 0x1, 0x1));
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_TRAINING_STATUS_REG, 0x1, 0x1));

		/* check for training done + results pass */
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x2, 0x2,
		     ODPG_TRAINING_STATUS_REG,
		     MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("Training Done Failed\n"));
			return MV_FAIL;
		}

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id,
				      ODPG_TRAINING_TRIGGER_REG, data_read,
				      0x4));
			data = data_read[if_id];
			if (data != 0x0) {
				DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
					       ("Training Result Failed\n"));
			}
		}

		/* disable ODPG - back to functional mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_ENABLE_REG, 0x1 << ODPG_DISABLE_OFFS,
			      (0x1 << ODPG_DISABLE_OFFS)));
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x0, 0x1,
		     ODPG_ENABLE_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("ODPG disable failed "));
			return MV_FAIL;
		}
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_DATA_CONTROL_REG, 0, MASK_ALL_BITS));

		/* double loop on bus, pup */
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			/* check training done */
			for (bus_num = 0;
			     bus_num < tm->num_of_bus_per_interface;
			     bus_num++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);

				if (per_bit_rl_pup_status[if_id][bus_num]
				    == 0) {
					curr_min_delay = 0;
					for (bit_num = 0; bit_num < 8;
					     bit_num++) {
						if (ddr3_tip_if_polling
						    (dev_num,
						     ACCESS_TYPE_UNICAST,
						     if_id, (1 << 25),
						     (1 << 25),
						     mask_results_dq_reg_map
						     [bus_num * 8 + bit_num],
						     MAX_POLLING_ITERATIONS) !=
						    MV_OK) {
							DEBUG_LEVELING
								(DEBUG_LEVEL_ERROR,
								 ("\n_r_l: DDR3 poll failed(2) for bus %d bit %d\n",
								  bus_num,
								  bit_num));
						} else {
							/* read result per pup */
							CHECK_STATUS
								(ddr3_tip_if_read
								 (dev_num,
								  ACCESS_TYPE_UNICAST,
								  if_id,
								  mask_results_dq_reg_map
								  [bus_num * 8 +
								   bit_num],
								  data_read,
								  MASK_ALL_BITS));
							data =
								(data_read
								 [if_id] &
								 0x1f) |
								((data_read
								  [if_id] &
								  0xe0) << 1);
							if (curr_min_delay == 0)
								curr_min_delay =
									data;
							else if (data <
								 curr_min_delay)
								curr_min_delay =
									data;
							if (data > data2_write[if_id][bus_num])
								data2_write
									[if_id]
									[bus_num] =
									data;
						}
					}

					if (data2_write[if_id][bus_num] <=
					    (curr_min_delay +
					     MAX_DQ_READ_LEVELING_DELAY)) {
						per_bit_rl_pup_status[if_id]
							[bus_num] = 1;
					}
				}
			}
		}

		/* check if there is a need to search for a new phyreg3 value */
		if (curr_numb < 2) {
			/* if there is a DLL that is not checked yet */
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				for (bus_num = 0;
				     bus_num < tm->num_of_bus_per_interface;
				     bus_num++) {
					VALIDATE_ACTIVE(tm->bus_act_mask,
							bus_num);
					if (per_bit_rl_pup_status[if_id]
					    [bus_num] != 1) {
						/* go to next ADLL value */
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_num, DDR_PHY_DATA,
							  READ_CENTRALIZATION_PHY_REG,
							  (phyreg3_arr[if_id]
							   [bus_num] +
							   adll_array[curr_numb])));
						break_loop = 1;
						break;
					}
				}
				if (break_loop)
					break;
			}
		} /* if (curr_numb < 2) */
		if (!break_loop)
			break;
	} /* for (curr_numb = 0; curr_numb < 3; curr_numb++) */

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_num = 0; bus_num < tm->num_of_bus_per_interface;
		     bus_num++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
			if (per_bit_rl_pup_status[if_id][bus_num] == 1)
				ddr3_tip_bus_write(dev_num,
						   ACCESS_TYPE_UNICAST,
						   if_id,
						   ACCESS_TYPE_UNICAST,
						   bus_num, DDR_PHY_DATA,
						   RL_PHY_REG +
						   CS_REG_VALUE(effective_cs),
						   data2_write[if_id]
						   [bus_num]);
			else
				is_any_pup_fail = 1;
		}

		/* TBD flow does not support multi CS */
		/*
		 * cs_bitmask = tm->interface_params[if_id].
		 * as_bus_params[bus_num].cs_bitmask;
		 */
		/* divide by 4 is used for retrieving the CS number */
		/*
		 * TBD BC2 - what is the PHY address for other
		 * CS ddr3_tip_write_cs_result() ???
		 */
		/*
		 * find what should be written to PHY
		 * - max delay that is less than threshold
		 */
		if (is_any_pup_fail == 1) {
			training_result[training_stage][if_id] = TEST_FAILED;
			if (debug_mode == 0)
				return MV_FAIL;
		}
	}
	DEBUG_LEVELING(DEBUG_LEVEL_INFO, ("RL exit read leveling\n"));

	/*
	 * Phase 3: Exit Read Leveling
	 */

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_SW_2_REG, (1 << 3), (1 << 3)));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_SW_1_REG, (1 << 16), (1 << 16)));
	/* set ODPG to functional */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));
	/*
	 * Copy the result from the effective CS search to the real
	 * Functional CS
	 */
	ddr3_tip_write_cs_result(dev_num, RL_PHY_REG);
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_DATA_CONTROL_REG, 0x0, MASK_ALL_BITS));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		/* restore cs enable value */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
		if (odt_config != 0) {
			CHECK_STATUS(ddr3_tip_write_additional_odt_setting
				     (dev_num, if_id));
		}
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		if (training_result[training_stage][if_id] == TEST_FAILED)
			return MV_FAIL;
	}

	return MV_OK;
}

int ddr3_tip_calc_cs_mask(u32 dev_num, u32 if_id, u32 effective_cs,
			  u32 *cs_mask)
{
	u32 all_bus_cs = 0, same_bus_cs;
	u32 bus_cnt;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	*cs_mask = same_bus_cs = CS_BIT_MASK;

	/*
	 * In some devices (such as BC2) the CS is per pup, and therefore
	 * mixed mode is valid there, unlike other devices where the CS
	 * configuration is per interface.
	 * To detect this, we OR and AND the CS bitmasks of all the pups.
	 * If they are not the same, it is mixed mode, so all CS should be
	 * configured (when configuring the MRS).
	 */
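	/*
	 * e.g. two pups with cs_bitmask 0x1 and 0x3: all_bus_cs = 0x3 but
	 * same_bus_cs = 0x1, so the per-CS unmasking below is skipped and
	 * every CS stays selected for the MRS command.
	 */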
	for (bus_cnt = 0; bus_cnt < tm->num_of_bus_per_interface; bus_cnt++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);

		all_bus_cs |= tm->interface_params[if_id].
			as_bus_params[bus_cnt].cs_bitmask;
		same_bus_cs &= tm->interface_params[if_id].
			as_bus_params[bus_cnt].cs_bitmask;

		/* cs enable is active low */
		*cs_mask &= ~tm->interface_params[if_id].
			as_bus_params[bus_cnt].cs_bitmask;
	}

	if (all_bus_cs == same_bus_cs)
		*cs_mask = (*cs_mask | (~(1 << effective_cs))) & CS_BIT_MASK;

	return MV_OK;
}

/*
 * Dynamic write leveling
 */
int ddr3_tip_dynamic_write_leveling(u32 dev_num)
{
	u32 reg_data = 0, iter, if_id, bus_cnt;
	u32 cs_enable_reg_val[MAX_INTERFACE_NUM] = { 0 };
	u32 cs_mask[MAX_INTERFACE_NUM];
	u32 read_data_sample_delay_vals[MAX_INTERFACE_NUM] = { 0 };
	u32 read_data_ready_delay_vals[MAX_INTERFACE_NUM] = { 0 };
	/* 0 for failure */
	u32 res_values[MAX_INTERFACE_NUM * MAX_BUS_NUM] = { 0 };
	u32 test_res = 0;	/* 0 - success for all pup */
	u32 data_read[MAX_INTERFACE_NUM];
	u8 wl_values[NUM_OF_CS][MAX_BUS_NUM][MAX_INTERFACE_NUM];
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u32 cs_mask0[MAX_INTERFACE_NUM] = { 0 };
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);

		training_result[training_stage][if_id] = TEST_SUCCESS;

		/* save Read Data Sample Delay */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      READ_DATA_SAMPLE_DELAY,
			      read_data_sample_delay_vals, MASK_ALL_BITS));
		/* save Read Data Ready Delay */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      READ_DATA_READY_DELAY, read_data_ready_delay_vals,
			      MASK_ALL_BITS));
		/* save current cs reg val */
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, cs_enable_reg_val, MASK_ALL_BITS));
	}

	/*
	 * Phase 1: DRAM to write leveling mode
	 */

	/* Assert 10 refresh commands to DRAM, to all CS */
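	/*
	 * SDRAM_OPERATION_REG, mask 0xf1f: the 0x2 command field is taken
	 * to be the refresh opcode here, and the CS bits [11:8] are active
	 * low, so ~0xf selects all CS.
	 */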
	for (iter = 0; iter < WL_ITERATION_NUM; iter++) {
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_write
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, SDRAM_OPERATION_REG,
				      (u32)((~(0xf) << 8) | 0x2), 0xf1f));
		}
	}
	/* check controller back to normal */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		if (ddr3_tip_if_polling
		    (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
		     SDRAM_OPERATION_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("WL: DDR3 poll failed(3)"));
		}
	}

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		/* enable write leveling to all cs - Q off, WL n */
		/* calculate interface cs mask */
		CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MRS1_CMD,
						    0x1000, 0x1080));

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			/* cs enable is active low */
			ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs,
					      &cs_mask[if_id]);
		}

		/* Enable Output buffer to relevant CS - Q on, WL on */
		CHECK_STATUS(ddr3_tip_write_mrs_cmd
			     (dev_num, cs_mask, MRS1_CMD, 0x80, 0x1080));

		/* enable odt for relevant CS */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      0x1498, (0x3 << (effective_cs * 2)), 0xf));

		/*
		 * Phase 2: Set training IP to write leveling mode
		 */

		CHECK_STATUS(ddr3_tip_dynamic_write_leveling_seq(dev_num));

		/*
		 * Phase 3: Trigger training
		 */

		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      ODPG_TRAINING_TRIGGER_REG, 0x1, 0x1));

		for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);

			/* training done */
			if (ddr3_tip_if_polling
			    (dev_num, ACCESS_TYPE_UNICAST, if_id,
			     (1 << 1), (1 << 1), ODPG_TRAINING_STATUS_REG,
			     MAX_POLLING_ITERATIONS) != MV_OK) {
				DEBUG_LEVELING(
					DEBUG_LEVEL_ERROR,
					("WL: DDR3 poll (4) failed (Data: 0x%x)\n",
					 reg_data));
			}
#if !defined(CONFIG_ARMADA_38X)	/* Disabled. JIRA #1498 */
			else {
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id,
					      ODPG_TRAINING_TRIGGER_REG,
					      &reg_data, (1 << 2)));
				if (reg_data != 0) {
					DEBUG_LEVELING(
						DEBUG_LEVEL_ERROR,
						("WL: WL failed IF %d reg_data=0x%x\n",
						 if_id, reg_data));
				}
			}
#endif
		}

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			/* training done */
			if (ddr3_tip_if_polling
			    (dev_num, ACCESS_TYPE_UNICAST, if_id,
			     (1 << 1), (1 << 1), ODPG_TRAINING_STATUS_REG,
			     MAX_POLLING_ITERATIONS) != MV_OK) {
				DEBUG_LEVELING(
					DEBUG_LEVEL_ERROR,
					("WL: DDR3 poll (4) failed (Data: 0x%x)\n",
					 reg_data));
			} else {
#if !defined(CONFIG_ARMADA_38X)	/* Disabled. JIRA #1498 */
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id,
					      ODPG_TRAINING_STATUS_REG,
					      data_read, (1 << 2)));
				reg_data = data_read[if_id];
				if (reg_data != 0) {
					DEBUG_LEVELING(
						DEBUG_LEVEL_ERROR,
						("WL: WL failed IF %d reg_data=0x%x\n",
						 if_id, reg_data));
				}
#endif

				/* check for training completion per bus */
				for (bus_cnt = 0;
				     bus_cnt < tm->num_of_bus_per_interface;
				     bus_cnt++) {
					VALIDATE_ACTIVE(tm->bus_act_mask,
							bus_cnt);
					/* training status */
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      mask_results_pup_reg_map
						      [bus_cnt], data_read,
						      (1 << 25)));
					reg_data = data_read[if_id];
					DEBUG_LEVELING(
						DEBUG_LEVEL_TRACE,
						("WL: IF %d BUS %d reg 0x%x\n",
						 if_id, bus_cnt, reg_data));
					if (reg_data == 0) {
						res_values[
							(if_id *
							 tm->num_of_bus_per_interface)
							+ bus_cnt] = 1;
					}
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      mask_results_pup_reg_map
						      [bus_cnt], data_read,
						      0xff));
					/*
					 * Save the read value that should be
					 * written to the PHY register
					 */
					wl_values[effective_cs]
						[bus_cnt][if_id] =
						(u8)data_read[if_id];
				}
			}
		}

		/*
		 * Phase 4: Exit write leveling mode
		 */

		/* disable DQs toggling */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      WR_LEVELING_DQS_PATTERN_REG, 0x0, 0x1));

		/* Update MRS 1 (WL off) */
		CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask0, MRS1_CMD,
						    0x1000, 0x1080));

		/* Update MRS 1 (return to functional mode - Q on, WL off) */
		CHECK_STATUS(ddr3_tip_write_mrs_cmd
			     (dev_num, cs_mask0, MRS1_CMD, 0x0, 0x1080));

		/* set phy to normal mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, 0x5, 0x7));

		/* exit sw override mode */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      TRAINING_SW_2_REG, 0x4, 0x7));
	}

	/*
	 * Phase 5: Load WL values to each PHY
	 */

	for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			test_res = 0;
			for (bus_cnt = 0;
			     bus_cnt < tm->num_of_bus_per_interface;
			     bus_cnt++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
				/* check if result == pass */
				if (res_values
				    [(if_id *
				      tm->num_of_bus_per_interface) +
				     bus_cnt] == 0) {
					/*
					 * read result control register
					 * according to pup
					 */
					reg_data =
						wl_values[effective_cs][bus_cnt]
						[if_id];
					/*
					 * Write into write leveling register
					 * ([4:0] ADLL, [8:6] Phase, [15:10]
					 * (centralization) ADLL + 0x10)
					 */
					reg_data =
						(reg_data & 0x1f) |
						(((reg_data & 0xe0) >> 5) << 6) |
						(((reg_data & 0x1f) +
						  phy_reg1_val) << 10);
					ddr3_tip_bus_write(
						dev_num,
						ACCESS_TYPE_UNICAST,
						if_id,
						ACCESS_TYPE_UNICAST,
						bus_cnt,
						DDR_PHY_DATA,
						WL_PHY_REG +
						effective_cs *
						CS_REGISTER_ADDR_OFFSET,
						reg_data);
				} else {
					test_res = 1;
					/*
					 * read result control register
					 * according to pup
					 */
					CHECK_STATUS(ddr3_tip_if_read
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id,
						      mask_results_pup_reg_map
						      [bus_cnt], data_read,
						      0xff));
					reg_data = data_read[if_id];
					DEBUG_LEVELING(
						DEBUG_LEVEL_ERROR,
						("WL: IF %d BUS %d failed, reg 0x%x\n",
						 if_id, bus_cnt, reg_data));
				}
			}

			if (test_res != 0) {
				training_result[training_stage][if_id] =
					TEST_FAILED;
			}
		}
	}
	/* Set to 0 after each loop to avoid an illegal value being used */
	effective_cs = 0;

	/*
	 * Copy the result from the effective CS search to the real
	 * Functional CS
	 */
	/* ddr3_tip_write_cs_result(dev_num, WL_PHY_REG); */
	/* restore saved values */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		/* restore Read Data Sample Delay */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      READ_DATA_SAMPLE_DELAY,
			      read_data_sample_delay_vals[if_id],
			      MASK_ALL_BITS));

		/* restore Read Data Ready Delay */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      READ_DATA_READY_DELAY,
			      read_data_ready_delay_vals[if_id],
			      MASK_ALL_BITS));

		/* enable multi cs */
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id,
			      CS_ENABLE_REG, cs_enable_reg_val[if_id],
			      MASK_ALL_BITS));
	}

	/* Disable modt0 for CS0 training - need to adjust for multi CS */
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, 0x1498,
		      0x0, 0xf));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		if (training_result[training_stage][if_id] == TEST_FAILED)
			return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Dynamic write leveling supplementary
 */
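/*
 * For each pup, the write centralization delay is tried as-is, then at
 * +5 and -5 ADLL taps, until the XSB compare test passes either directly
 * or after a one-clock / alignment correction.
 */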
int ddr3_tip_dynamic_write_leveling_supp(u32 dev_num)
{
	int adll_offset;
	u32 if_id, bus_id, data, data_tmp;
	int is_if_fail = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		is_if_fail = 0;

		for (bus_id = 0; bus_id < GET_TOPOLOGY_NUM_OF_BUSES();
		     bus_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			wr_supp_res[if_id][bus_id].is_pup_fail = 1;
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_id, DDR_PHY_DATA,
				      WRITE_CENTRALIZATION_PHY_REG +
				      effective_cs * CS_REGISTER_ADDR_OFFSET,
				      &data));
			DEBUG_LEVELING(
				DEBUG_LEVEL_TRACE,
				("WL Supp: adll_offset=0 data delay = %d\n",
				 data));
			if (ddr3_tip_wl_supp_align_phase_shift
			    (dev_num, if_id, bus_id, 0, 0) == MV_OK) {
				DEBUG_LEVELING(
					DEBUG_LEVEL_TRACE,
					("WL Supp: IF %d bus_id %d adll_offset=0 Success !\n",
					 if_id, bus_id));
				continue;
			}

			/* change adll */
			adll_offset = 5;
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
				      WRITE_CENTRALIZATION_PHY_REG +
				      effective_cs * CS_REGISTER_ADDR_OFFSET,
				      data + adll_offset));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_id, DDR_PHY_DATA,
				      WRITE_CENTRALIZATION_PHY_REG +
				      effective_cs * CS_REGISTER_ADDR_OFFSET,
				      &data_tmp));
			DEBUG_LEVELING(
				DEBUG_LEVEL_TRACE,
				("WL Supp: adll_offset= %d data delay = %d\n",
				 adll_offset, data_tmp));

			if (ddr3_tip_wl_supp_align_phase_shift
			    (dev_num, if_id, bus_id, adll_offset, 0) == MV_OK) {
				DEBUG_LEVELING(
					DEBUG_LEVEL_TRACE,
					("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n",
					 if_id, bus_id, adll_offset));
				continue;
			}

			/* change adll */
			adll_offset = -5;
			CHECK_STATUS(ddr3_tip_bus_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      ACCESS_TYPE_UNICAST, bus_id, DDR_PHY_DATA,
				      WRITE_CENTRALIZATION_PHY_REG +
				      effective_cs * CS_REGISTER_ADDR_OFFSET,
				      data + adll_offset));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_id, DDR_PHY_DATA,
				      WRITE_CENTRALIZATION_PHY_REG +
				      effective_cs * CS_REGISTER_ADDR_OFFSET,
				      &data_tmp));
			DEBUG_LEVELING(
				DEBUG_LEVEL_TRACE,
				("WL Supp: adll_offset= %d data delay = %d\n",
				 adll_offset, data_tmp));
			if (ddr3_tip_wl_supp_align_phase_shift
			    (dev_num, if_id, bus_id, adll_offset, 0) == MV_OK) {
				DEBUG_LEVELING(
					DEBUG_LEVEL_TRACE,
					("WL Supp: IF %d bus_id %d adll_offset= %d Success !\n",
					 if_id, bus_id, adll_offset));
				continue;
			} else {
				DEBUG_LEVELING(
					DEBUG_LEVEL_ERROR,
					("WL Supp: IF %d bus_id %d Failed !\n",
					 if_id, bus_id));
				is_if_fail = 1;
			}
		}
		DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
			       ("WL Supp: IF %d bus_id %d is_pup_fail %d\n",
				if_id, bus_id, is_if_fail));

		if (is_if_fail == 1) {
			DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
				       ("WL Supp: IF %d failed\n", if_id));
			training_result[training_stage][if_id] = TEST_FAILED;
		} else {
			training_result[training_stage][if_id] = TEST_SUCCESS;
		}
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		if (training_result[training_stage][if_id] == TEST_FAILED)
			return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Phase Shift
 */
static int ddr3_tip_wl_supp_align_phase_shift(u32 dev_num, u32 if_id,
					      u32 bus_id, u32 offset,
					      u32 bus_id_delta)
{
	wr_supp_res[if_id][bus_id].stage = PHASE_SHIFT;
	if (ddr3_tip_xsb_compare_test(dev_num, if_id, bus_id,
				      0, bus_id_delta) == MV_OK) {
		wr_supp_res[if_id][bus_id].is_pup_fail = 0;
		return MV_OK;
	} else if (ddr3_tip_xsb_compare_test(dev_num, if_id, bus_id,
					     ONE_CLOCK_ERROR_SHIFT,
					     bus_id_delta) == MV_OK) {
		/* 1 clock error */
		wr_supp_res[if_id][bus_id].stage = CLOCK_SHIFT;
		DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
			       ("Supp: 1 error clock for if %d pup %d with offset %d success\n",
				if_id, bus_id, offset));
		ddr3_tip_wl_supp_one_clk_err_shift(dev_num, if_id, bus_id, 0);
		wr_supp_res[if_id][bus_id].is_pup_fail = 0;
		return MV_OK;
	} else if (ddr3_tip_xsb_compare_test(dev_num, if_id, bus_id,
					     ALIGN_ERROR_SHIFT,
					     bus_id_delta) == MV_OK) {
		/* align error */
		DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
			       ("Supp: align error for if %d pup %d with offset %d success\n",
				if_id, bus_id, offset));
		wr_supp_res[if_id][bus_id].stage = ALIGN_SHIFT;
		ddr3_tip_wl_supp_align_err_shift(dev_num, if_id, bus_id, 0);
		wr_supp_res[if_id][bus_id].is_pup_fail = 0;
		return MV_OK;
	} else {
		wr_supp_res[if_id][bus_id].is_pup_fail = 1;
		return MV_FAIL;
	}
}

/*
 * Compare Test
 */
static int ddr3_tip_xsb_compare_test(u32 dev_num, u32 if_id, u32 bus_id,
				     int edge_offset, u32 bus_id_delta)
{
	u32 num_of_succ_byte_compare, word_in_pattern, abs_offset;
	int word_offset;
	u32 i;
	u32 read_pattern[TEST_PATTERN_LENGTH * 2];
	struct pattern_info *pattern_table = ddr3_tip_get_pattern_table();
	u32 pattern_test_pattern_table[8];

	for (i = 0; i < 8; i++) {
		pattern_test_pattern_table[i] =
			pattern_table_get_word(dev_num, PATTERN_TEST, (u8)i);
	}
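
	/*
	 * edge_offset selects which shifted copy of the pattern to compare
	 * against: 0 for an aligned result, ONE_CLOCK_ERROR_SHIFT (+2
	 * words) for a one-clock error, ALIGN_ERROR_SHIFT (-2 words) for
	 * an alignment error.
	 */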

	/* external write, then read back and compare */
	CHECK_STATUS(ddr3_tip_ext_write
		     (dev_num, if_id,
		      (pattern_table[PATTERN_TEST].start_addr +
		       ((SDRAM_CS_SIZE + 1) * effective_cs)), 1,
		      pattern_test_pattern_table));

	CHECK_STATUS(ddr3_tip_reset_fifo_ptr(dev_num));

	CHECK_STATUS(ddr3_tip_ext_read
		     (dev_num, if_id,
		      (pattern_table[PATTERN_TEST].start_addr +
		       ((SDRAM_CS_SIZE + 1) * effective_cs)), 1, read_pattern));

	DEBUG_LEVELING(
		DEBUG_LEVEL_TRACE,
		("XSB-compt: IF %d bus_id %d 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
		 if_id, bus_id, read_pattern[0], read_pattern[1],
		 read_pattern[2], read_pattern[3], read_pattern[4],
		 read_pattern[5], read_pattern[6], read_pattern[7]));

	/* compare byte per pup */
	num_of_succ_byte_compare = 0;
	for (word_in_pattern = start_xsb_offset;
	     word_in_pattern < (TEST_PATTERN_LENGTH * 2); word_in_pattern++) {
		word_offset = word_in_pattern + edge_offset;
		if ((word_offset > (TEST_PATTERN_LENGTH * 2 - 1)) ||
		    (word_offset < 0))
			continue;

		if ((read_pattern[word_in_pattern] & pup_mask_table[bus_id]) ==
		    (pattern_test_pattern_table[word_offset] &
		     pup_mask_table[bus_id]))
			num_of_succ_byte_compare++;
	}

	abs_offset = (edge_offset > 0) ? edge_offset : -edge_offset;
	if (num_of_succ_byte_compare == ((TEST_PATTERN_LENGTH * 2) -
					 abs_offset - start_xsb_offset)) {
		DEBUG_LEVELING(
			DEBUG_LEVEL_TRACE,
			("XSB-compt: IF %d bus_id %d num_of_succ_byte_compare %d - Success\n",
			 if_id, bus_id, num_of_succ_byte_compare));
		return MV_OK;
	} else {
		DEBUG_LEVELING(
			DEBUG_LEVEL_TRACE,
			("XSB-compt: IF %d bus_id %d num_of_succ_byte_compare %d - Fail !\n",
			 if_id, bus_id, num_of_succ_byte_compare));

		DEBUG_LEVELING(
			DEBUG_LEVEL_TRACE,
			("XSB-compt: expected 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
			 pattern_test_pattern_table[0],
			 pattern_test_pattern_table[1],
			 pattern_test_pattern_table[2],
			 pattern_test_pattern_table[3],
			 pattern_test_pattern_table[4],
			 pattern_test_pattern_table[5],
			 pattern_test_pattern_table[6],
			 pattern_test_pattern_table[7]));
		DEBUG_LEVELING(
			DEBUG_LEVEL_TRACE,
			("XSB-compt: received 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
			 read_pattern[0], read_pattern[1],
			 read_pattern[2], read_pattern[3],
			 read_pattern[4], read_pattern[5],
			 read_pattern[6], read_pattern[7]));

		return MV_FAIL;
	}
}

/*
 * Clock error shift - function moves the write leveling delay 1cc forward
 */
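/*
 * The WL PHY register holds the ADLL in bits [4:0] and the phase in bits
 * [8:6]; the one-clock move below advances the phase field by two steps,
 * i.e. one phase step is taken to be half a clock.
 */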
static int ddr3_tip_wl_supp_one_clk_err_shift(u32 dev_num, u32 if_id,
					      u32 bus_id, u32 bus_id_delta)
{
	int phase, adll;
	u32 data;

	DEBUG_LEVELING(DEBUG_LEVEL_TRACE, ("One_clk_err_shift\n"));

	CHECK_STATUS(ddr3_tip_bus_read
		     (dev_num, if_id, ACCESS_TYPE_UNICAST, bus_id,
		      DDR_PHY_DATA, WL_PHY_REG, &data));
	phase = ((data >> 6) & 0x7);
	adll = data & 0x1f;
	DEBUG_LEVELING(DEBUG_LEVEL_TRACE,
		       ("One_clk_err_shift: IF %d bus_id %d phase %d adll %d\n",
			if_id, bus_id, phase, adll));

	if ((phase == 0) || (phase == 1)) {
		CHECK_STATUS(ddr3_tip_bus_read_modify_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_id,
			      DDR_PHY_DATA, 0, (phase + 2), 0x1f));
	} else if (phase == 2) {
		if (adll < 6) {
			data = (3 << 6) + (0x1f);
			CHECK_STATUS(ddr3_tip_bus_read_modify_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      bus_id, DDR_PHY_DATA, 0, data,
				      (0x7 << 6 | 0x1f)));
			data = 0x2f;
			CHECK_STATUS(ddr3_tip_bus_read_modify_write
				     (dev_num, ACCESS_TYPE_UNICAST, if_id,
				      bus_id, DDR_PHY_DATA, 1, data, 0x3f));
		}
	} else {
		/* phase 3 */
		return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Align error shift
 */
static int ddr3_tip_wl_supp_align_err_shift(u32 dev_num, u32 if_id,
					    u32 bus_id, u32 bus_id_delta)
{
	int phase, adll;
	u32 data;

	/* Shift WL result 1 phase back */
	CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id, ACCESS_TYPE_UNICAST,
				       bus_id, DDR_PHY_DATA, WL_PHY_REG,
				       &data));
	phase = ((data >> 6) & 0x7);
	adll = data & 0x1f;
	DEBUG_LEVELING(
		DEBUG_LEVEL_TRACE,
		("Wl_supp_align_err_shift: IF %d bus_id %d phase %d adll %d\n",
		 if_id, bus_id, phase, adll));

	if (phase < 2) {
		if (adll > 0x1a) {
			if (phase == 0)
				return MV_FAIL;

			if (phase == 1) {
				data = 0;
				CHECK_STATUS(ddr3_tip_bus_read_modify_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, bus_id, DDR_PHY_DATA,
					      0, data, (0x7 << 6 | 0x1f)));
				data = 0xf;
				CHECK_STATUS(ddr3_tip_bus_read_modify_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, bus_id, DDR_PHY_DATA,
					      1, data, 0x1f));
				return MV_OK;
			}
		} else {
			return MV_FAIL;
		}
	} else if ((phase == 2) || (phase == 3)) {
		phase = phase - 2;
		data = (phase << 6) + (adll & 0x1f);
		CHECK_STATUS(ddr3_tip_bus_read_modify_write
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, bus_id,
			      DDR_PHY_DATA, 0, data, (0x7 << 6 | 0x1f)));
		return MV_OK;
	} else {
		DEBUG_LEVELING(DEBUG_LEVEL_ERROR,
			       ("Wl_supp_align_err_shift: unexpected phase\n"));

		return MV_FAIL;
	}

	return MV_OK;
}

/*
 * Dynamic write leveling sequence
 */
static int ddr3_tip_dynamic_write_leveling_seq(u32 dev_num)
{
	u32 bus_id, dq_id;
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_SW_2_REG, 0x1, 0x5));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_WRITE_LEVELING_REG, 0x50, 0xff));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_WRITE_LEVELING_REG, 0x5c, 0xff));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_TRAINING_CONTROL_REG, 0x381b82, 0x3c3faf));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_OBJ1_OPCODE_REG, (0x3 << 25), (0x3ffff << 9)));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_OBJ1_ITER_CNT_REG, 0x80, 0xffff));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      ODPG_WRITE_LEVELING_DONE_CNTR_REG, 0x14, 0xff));
	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      TRAINING_WRITE_LEVELING_REG, 0xff5c, 0xffff));

	/* mask PBS */
	for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_dq_reg_map[dq_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Mask all results */
	for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_pup_reg_map[bus_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Unmask only wanted */
	for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_pup_reg_map[bus_id], 0, 0x1 << 24));
	}

	CHECK_STATUS(ddr3_tip_if_write
		     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
		      WR_LEVELING_DQS_PATTERN_REG, 0x1, 0x1));

	return MV_OK;
}

/*
 * Dynamic read leveling sequence
 */
static int ddr3_tip_dynamic_read_leveling_seq(u32 dev_num)
{
	u32 bus_id, dq_id;
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* mask PBS */
	for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_dq_reg_map[dq_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Mask all results */
	for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_pup_reg_map[bus_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Unmask only wanted */
	for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_pup_reg_map[bus_id], 0, 0x1 << 24));
	}

	return MV_OK;
}

/*
 * Dynamic per bit read leveling sequence
 */
static int ddr3_tip_dynamic_per_bit_read_leveling_seq(u32 dev_num)
{
	u32 bus_id, dq_id;
	u16 *mask_results_pup_reg_map = ddr3_tip_get_mask_results_pup_reg_map();
	u16 *mask_results_dq_reg_map = ddr3_tip_get_mask_results_dq_reg();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* mask PBS */
	for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_dq_reg_map[dq_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Mask all results */
	for (bus_id = 0; bus_id < tm->num_of_bus_per_interface; bus_id++) {
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_pup_reg_map[bus_id], 0x1 << 24,
			      0x1 << 24));
	}

	/* Unmask only wanted */
	for (dq_id = 0; dq_id < MAX_DQ_NUM; dq_id++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, dq_id / 8);
		CHECK_STATUS(ddr3_tip_if_write
			     (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
			      mask_results_dq_reg_map[dq_id], 0x0 << 24,
			      0x1 << 24));
	}

	return MV_OK;
}

/*
 * Print write leveling supplementary results
 */
int ddr3_tip_print_wl_supp_result(u32 dev_num)
{
	u32 bus_id = 0, if_id = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	DEBUG_LEVELING(DEBUG_LEVEL_INFO,
		       ("I/F0 PUP0 Result[0 - success, 1-fail] ...\n"));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
		     bus_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			DEBUG_LEVELING(DEBUG_LEVEL_INFO,
				       ("%d ,", wr_supp_res[if_id]
					[bus_id].is_pup_fail));
		}
	}
	DEBUG_LEVELING(
		DEBUG_LEVEL_INFO,
		("I/F0 PUP0 Stage[0-phase_shift, 1-clock_shift, 2-align_shift] ...\n"));

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
		     bus_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			DEBUG_LEVELING(DEBUG_LEVEL_INFO,
				       ("%d ,", wr_supp_res[if_id]
					[bus_id].stage));
		}
	}

	return MV_OK;
}