/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

u8 is_reg_dump = 0;
u8 debug_pbs = DEBUG_LEVEL_ERROR;

/*
 * API to change flags outside of the lib
 */
#ifndef SILENT_LIB
/* Debug flags for other Training modules */
u8 debug_training_static = DEBUG_LEVEL_ERROR;
u8 debug_training = DEBUG_LEVEL_ERROR;
u8 debug_leveling = DEBUG_LEVEL_ERROR;
u8 debug_centralization = DEBUG_LEVEL_ERROR;
u8 debug_training_ip = DEBUG_LEVEL_ERROR;
u8 debug_training_bist = DEBUG_LEVEL_ERROR;
u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
u8 debug_training_access = DEBUG_LEVEL_ERROR;
u8 debug_training_a38x = DEBUG_LEVEL_ERROR;

void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	switch (block) {
	case DEBUG_BLOCK_STATIC:
		debug_training_static = level;
		break;
	case DEBUG_BLOCK_TRAINING_MAIN:
		debug_training = level;
		break;
	case DEBUG_BLOCK_LEVELING:
		debug_leveling = level;
		break;
	case DEBUG_BLOCK_CENTRALIZATION:
		debug_centralization = level;
		break;
	case DEBUG_BLOCK_PBS:
		debug_pbs = level;
		break;
	case DEBUG_BLOCK_ALG:
		debug_training_hw_alg = level;
		break;
	case DEBUG_BLOCK_DEVICE:
		debug_training_a38x = level;
		break;
	case DEBUG_BLOCK_ACCESS:
		debug_training_access = level;
		break;
	case DEBUG_STAGES_REG_DUMP:
		if (level == DEBUG_LEVEL_TRACE)
			is_reg_dump = 1;
		else
			is_reg_dump = 0;
		break;
	case DEBUG_BLOCK_ALL:
	default:
		debug_training_static = level;
		debug_training = level;
		debug_leveling = level;
		debug_centralization = level;
		debug_pbs = level;
		debug_training_hw_alg = level;
		debug_training_access = level;
		debug_training_a38x = level;
	}
}
#else
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	return;
}
#endif

struct hws_tip_config_func_db config_func_info[HWS_MAX_DEVICE_NUM];
u8 is_default_centralization = 0;
u8 is_tune_result = 0;
u8 is_validate_window_per_if = 0;
u8 is_validate_window_per_pup = 0;
u8 sweep_cnt = 1;
u32 is_bist_reset_bit = 1;
static struct hws_xsb_info xsb_info[HWS_MAX_DEVICE_NUM];

/*
 * Dump Dunit & Phy registers
 */
int ddr3_tip_reg_dump(u32 dev_num)
{
	u32 if_id, reg_addr, data_value, bus_id;
	u32 read_data[MAX_INTERFACE_NUM];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	printf("-- dunit registers --\n");
	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, reg_addr, read_data,
				      MASK_ALL_BITS));
			printf("0x%x ", read_data[if_id]);
		}
		printf("\n");
	}

	printf("-- Phy registers --\n");
	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			for (bus_id = 0;
			     bus_id < tm->num_of_bus_per_interface;
			     bus_id++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_DATA, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
			for (bus_id = 0;
			     bus_id < tm->num_of_bus_per_interface;
			     bus_id++) {
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_CONTROL, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
		}
		printf("\n");
	}

	return MV_OK;
}

/*
 * Register access func registration
 */
int ddr3_tip_init_config_func(u32 dev_num,
			      struct hws_tip_config_func_db *config_func)
{
	if (config_func == NULL)
		return MV_BAD_PARAM;

	memcpy(&config_func_info[dev_num], config_func,
	       sizeof(struct hws_tip_config_func_db));

	return MV_OK;
}

/*
 * Get training result info pointer
 */
enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
{
	return training_result[stage];
}

/*
 * Device info read
 */
int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
{
	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
		return config_func_info[dev_num].
			tip_get_device_info_func((u8)dev_num, info_ptr);
	}

	return MV_FAIL;
}

#ifndef EXCLUDE_SWITCH_DEBUG
/*
 * Convert freq to character string
 */
static char *convert_freq(enum hws_ddr_freq freq)
{
	switch (freq) {
	case DDR_FREQ_LOW_FREQ:
		return "DDR_FREQ_LOW_FREQ";
	case DDR_FREQ_400:
		return "400";
	case DDR_FREQ_533:
		return "533";
	case DDR_FREQ_667:
		return "667";
	case DDR_FREQ_800:
		return "800";
	case DDR_FREQ_933:
		return "933";
	case DDR_FREQ_1066:
		return "1066";
	case DDR_FREQ_311:
		return "311";
	case DDR_FREQ_333:
		return "333";
	case DDR_FREQ_467:
		return "467";
	case DDR_FREQ_850:
		return "850";
	case DDR_FREQ_900:
		return "900";
	case DDR_FREQ_360:
		return "DDR_FREQ_360";
	case DDR_FREQ_1000:
		return "DDR_FREQ_1000";
	default:
		return "Unknown Frequency";
	}
}

/*
 * Convert device ID to character string
 */
static char *convert_dev_id(u32 dev_id)
{
	switch (dev_id) {
	case 0x6800:
		return "A38xx";
	case 0x6900:
		return "A39XX";
	case 0xf400:
		return "AC3";
	case 0xfc00:
		return "BC2";
	default:
		return "Unknown Device";
	}
}

258/*
259 * Convert device ID to character string
260 */
static char *convert_mem_size(u32 dev_id)
{
	switch (dev_id) {
	case 0:
		return "512 MB";
	case 1:
		return "1 GB";
	case 2:
		return "2 GB";
	case 3:
		return "4 GB";
	case 4:
		return "8 GB";
	default:
		return "wrong mem size";
	}
}

int print_device_info(u8 dev_num)
{
	struct ddr3_device_info info_ptr;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
	printf("=== DDR setup START===\n");
	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
	printf("\tDDR3 CK delay: %d\n", info_ptr.ck_delay);
	print_topology(tm);
	printf("=== DDR setup END===\n");

	return MV_OK;
}

void hws_ddr3_tip_sweep_test(int enable)
{
	if (enable) {
		is_validate_window_per_if = 1;
		is_validate_window_per_pup = 1;
		debug_training = DEBUG_LEVEL_TRACE;
	} else {
		is_validate_window_per_if = 0;
		is_validate_window_per_pup = 0;
	}
}
#endif

char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
{
	switch (tune_result) {
	case TEST_FAILED:
		return "FAILED";
	case TEST_SUCCESS:
		return "PASS";
	case NO_TEST_DONE:
		return "NOT COMPLETED";
	default:
		return "Un-KNOWN";
	}
}

/*
 * Print log info
 */
int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
{
	u32 if_id = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

#ifndef EXCLUDE_SWITCH_DEBUG
	if ((is_validate_window_per_if != 0) ||
	    (is_validate_window_per_pup != 0)) {
		u32 is_pup_log = 0;
		enum hws_ddr_freq freq;

		freq = tm->interface_params[first_active_if].memory_freq;

		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
		printf("===VALIDATE WINDOW LOG START===\n");
		printf("DDR Frequency: %s ======\n", convert_freq(freq));
		/* print sweep windows */
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
		ddr3_tip_print_all_pbs_result(dev_num);
		ddr3_tip_print_wl_supp_result(dev_num);
		printf("===VALIDATE WINDOW LOG END ===\n");
		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
		ddr3_tip_reg_dump(dev_num);
	}
#endif

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);

		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("IF %d Status:\n", if_id));

		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tInit Controller: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[INIT_CONTROLLER]
					    [if_id])));
		}
		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLow freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_LOW_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN]
					    [if_id])));
		}
		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tMedium freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_MEDIUM_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN_2]
					    [if_id])));
		}
		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_SUPP]
					    [if_id])));
		}
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_RX]
					    [if_id])));
		}
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_TX]
					    [if_id])));
		}
		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tTarget freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_TARGET_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_TF]
					    [if_id])));
		}
		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING_TF]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result
					    [WRITE_LEVELING_SUPP_TF]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_RX]
					    [if_id])));
		}
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tVREF_CALIBRATION: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_TX]
					    [if_id])));
		}
	}

	return MV_OK;
}

/*
 * Print stability log info
 */
int ddr3_tip_print_stability_log(u32 dev_num)
{
	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
	u32 reg_data;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/* Title print */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			printf("\n");
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSTx-Pad%d,", idx);
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSRx-Pad%d,", idx);
		}
	}
	printf("\n");

	/* Data print */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);

		printf("Data: %d,%d,", if_id,
		       (config_func_info[dev_num].tip_get_temperature != NULL)
		       ? (config_func_info[dev_num].
			  tip_get_temperature(dev_num)) : (0));

		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
		       ((read_data[if_id] & 0xfc00000) >> 22));

		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
				printf("\n");
				VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_DB_PHY_REG_ADDR +
						  csindex, &reg_data);
				printf("%d,%d,", (reg_data & 0x1f),
				       ((reg_data & 0x3e0) >> 5));
				/* WL */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  WL_PHY_REG +
						  csindex * 4, &reg_data);
				printf("%d,%d,%d,",
				       (reg_data & 0x1f) +
				       ((reg_data & 0x1c0) >> 6) * 32,
				       (reg_data & 0x1f),
				       (reg_data & 0x1c0) >> 6);
				/* RL */
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id,
					      READ_DATA_SAMPLE_DELAY,
					      read_data, MASK_ALL_BITS));
				read_data[if_id] =
					(read_data[if_id] &
					 (0xf << (4 * csindex))) >>
					(4 * csindex);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  RL_PHY_REG + csindex * 4,
						  &reg_data);
				printf("%d,%d,%d,%d,",
				       (reg_data & 0x1f) +
				       ((reg_data & 0x1c0) >> 6) * 32 +
				       read_data[if_id] * 64,
				       (reg_data & 0x1f),
				       ((reg_data & 0x1c0) >> 6),
				       read_data[if_id]);
				/* Centralization */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  WRITE_CENTRALIZATION_PHY_REG
						  + csindex * 4, &reg_data);
				printf("%d,", (reg_data & 0x3f));
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  READ_CENTRALIZATION_PHY_REG
						  + csindex * 4, &reg_data);
				printf("%d,", (reg_data & 0x1f));
				/* Vref */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  PAD_CONFIG_PHY_REG,
						  &reg_data);
				printf("%d,", (reg_data & 0x7));
				/* DQVref */
				/* Need to add the Read Function from device */
				printf("%d,", 0);
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0xd0 +
							  12 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0x10 +
							  16 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0x50 +
							  16 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
			}
		}
	}
	printf("\n");

	return MV_OK;
}

/*
 * Register XSB information
 */
int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
{
	memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
	return MV_OK;
}

/*
 * Read ADLL Value
 */
int read_adll_value(u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
		    int reg_addr, u32 mask)
{
	u32 data_value;
	u32 if_id = 0, bus_id = 0;
	u32 dev_num = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * multi CS support - reg_addr is calculated in the calling function
	 * with the CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
		     bus_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       ACCESS_TYPE_UNICAST,
						       bus_id,
						       DDR_PHY_DATA, reg_addr,
						       &data_value));
			pup_values[if_id *
				   tm->num_of_bus_per_interface + bus_id] =
				data_value & mask;
		}
	}

	return 0;
}

/*
 * Write ADLL Value
 */
int write_adll_value(u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
		     int reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 dev_num = 0, data;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	/*
	 * multi CS support - reg_addr is calculated in the calling function
	 * with the CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < tm->num_of_bus_per_interface;
		     bus_id++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id *
					  tm->num_of_bus_per_interface +
					  bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id, DDR_PHY_DATA,
							reg_addr, data));
		}
	}

	return 0;
}

#ifndef EXCLUDE_SWITCH_DEBUG
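/*
 * Debug/tuning knobs below are only built into the switch-debug
 * configuration; most of them are exposed for runtime adjustment through
 * ddr3_tip_set_atr()/ddr3_tip_access_atr().
 */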
u32 rl_version = 1;	/* 0 - old RL machine */
u32 start_xsb_offset = 0;
u8 is_rl_old = 0;
u8 is_freq_old = 0;
u8 is_dfs_disabled = 0;
u32 default_centrlization_value = 0x12;
u32 vref = 0x4;
u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
	rl_test = 0, reset_read_fifo = 0;
int debug_acc = 0;
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
u8 cs_mask_reg[] = {
	0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

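/*
 * Data patterns used by run_xsb_test(); each row is one 8-word burst that
 * is written to and/or read back from the DRAM under test.
 */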
u32 xsb_test_table[][8] = {
	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
	 0x66666666, 0x77777777},
	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
	 0xeeeeeeee, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
	 0xffffffff, 0xffffffff},
	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
	 0x00000000, 0x00000000},
	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
	 0xffffffff, 0xffffffff}
};

static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr);

int ddr3_tip_print_adll(void)
{
	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
		     bus_cnt++) {
			VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id,
				      ACCESS_TYPE_UNICAST, bus_cnt,
				      DDR_PHY_DATA, 0x1, &data_p1));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  (" IF %d bus_cnt %d phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
					   if_id, bus_cnt, data_p1, data_p2,
					   ui_data3));
		}
	}

	return MV_OK;
}

/*
 * Set attribute value
 */
int ddr3_tip_set_atr(u32 dev_num, u32 flag_id, u32 value)
{
	int ret;
	u32 *ptr_flag = NULL;

	ret = ddr3_tip_access_atr(dev_num, flag_id, value, &ptr_flag);
	if (ptr_flag != NULL) {
		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x (was 0x%x)\n",
		       flag_id, value, *ptr_flag);
		*ptr_flag = value;
	} else {
		printf("ddr3_tip_set_atr Flag ID 0x%x value is set to 0x%x\n",
		       flag_id, value);
	}

	return ret;
}

/*
 * Access attribute
 */
static int ddr3_tip_access_atr(u32 dev_num, u32 flag_id, u32 value, u32 **ptr)
{
	u32 tmp_val = 0, if_id = 0, pup_id = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	*ptr = NULL;

	switch (flag_id) {
	case 0:
		*ptr = (u32 *)&(tm->if_act_mask);
		break;

	case 0x1:
		*ptr = (u32 *)&mask_tune_func;
		break;

	case 0x2:
		*ptr = (u32 *)&low_freq;
		break;

	case 0x3:
		*ptr = (u32 *)&medium_freq;
		break;

	case 0x4:
		*ptr = (u32 *)&generic_init_controller;
		break;

	case 0x5:
		*ptr = (u32 *)&rl_version;
		break;

	case 0x8:
		*ptr = (u32 *)&start_xsb_offset;
		break;

	case 0x20:
		*ptr = (u32 *)&is_rl_old;
		break;

	case 0x21:
		*ptr = (u32 *)&is_freq_old;
		break;

	case 0x23:
		*ptr = (u32 *)&is_dfs_disabled;
		break;

	case 0x24:
		*ptr = (u32 *)&is_pll_before_init;
		break;

	case 0x25:
		*ptr = (u32 *)&is_adll_calib_before_init;
		break;
#ifdef STATIC_ALGO_SUPPORT
	case 0x26:
		*ptr = (u32 *)&(silicon_delay[0]);
		break;

	case 0x27:
		*ptr = (u32 *)&wl_debug_delay;
		break;
#endif
	case 0x28:
		*ptr = (u32 *)&is_tune_result;
		break;

	case 0x29:
		*ptr = (u32 *)&is_validate_window_per_if;
		break;

	case 0x2a:
		*ptr = (u32 *)&is_validate_window_per_pup;
		break;

	case 0x30:
		*ptr = (u32 *)&sweep_cnt;
		break;

	case 0x31:
		*ptr = (u32 *)&is_bist_reset_bit;
		break;

	case 0x32:
		*ptr = (u32 *)&is_dfs_in_init;
		break;

	case 0x33:
		*ptr = (u32 *)&p_finger;
		break;

	case 0x34:
		*ptr = (u32 *)&n_finger;
		break;

	case 0x35:
		*ptr = (u32 *)&init_freq;
		break;

	case 0x36:
		*ptr = (u32 *)&(freq_val[DDR_FREQ_LOW_FREQ]);
		break;

	case 0x37:
		*ptr = (u32 *)&start_pattern;
		break;

	case 0x38:
		*ptr = (u32 *)&end_pattern;
		break;

	case 0x39:
		*ptr = (u32 *)&phy_reg0_val;
		break;

	case 0x4a:
		*ptr = (u32 *)&phy_reg1_val;
		break;

	case 0x4b:
		*ptr = (u32 *)&phy_reg2_val;
		break;

	case 0x4c:
		*ptr = (u32 *)&phy_reg3_val;
		break;

	case 0x4e:
		*ptr = (u32 *)&sweep_pattern;
		break;

	case 0x50:
		*ptr = (u32 *)&is_rzq6;
		break;

	case 0x51:
		*ptr = (u32 *)&znri_data_phy_val;
		break;

	case 0x52:
		*ptr = (u32 *)&zpri_data_phy_val;
		break;

	case 0x53:
		*ptr = (u32 *)&finger_test;
		break;

	case 0x54:
		*ptr = (u32 *)&n_finger_start;
		break;

	case 0x55:
		*ptr = (u32 *)&n_finger_end;
		break;

	case 0x56:
		*ptr = (u32 *)&p_finger_start;
		break;

	case 0x57:
		*ptr = (u32 *)&p_finger_end;
		break;

	case 0x58:
		*ptr = (u32 *)&p_finger_step;
		break;

	case 0x59:
		*ptr = (u32 *)&n_finger_step;
		break;

	case 0x5a:
		*ptr = (u32 *)&znri_ctrl_phy_val;
		break;

	case 0x5b:
		*ptr = (u32 *)&zpri_ctrl_phy_val;
		break;

	case 0x5c:
		*ptr = (u32 *)&is_reg_dump;
		break;

	case 0x5d:
		*ptr = (u32 *)&vref;
		break;

	case 0x5e:
		*ptr = (u32 *)&mode2_t;
		break;

	case 0x5f:
		*ptr = (u32 *)&xsb_validate_type;
		break;

	case 0x60:
		*ptr = (u32 *)&xsb_validation_base_address;
		break;

	case 0x67:
		*ptr = (u32 *)&activate_select_before_run_alg;
		break;

	case 0x68:
		*ptr = (u32 *)&activate_deselect_after_run_alg;
		break;

	case 0x69:
		*ptr = (u32 *)&odt_additional;
		break;

	case 0x70:
		*ptr = (u32 *)&debug_mode;
		break;

	case 0x71:
		*ptr = (u32 *)&pbs_pattern;
		break;

	case 0x72:
		*ptr = (u32 *)&delay_enable;
		break;

	case 0x73:
		*ptr = (u32 *)&ck_delay;
		break;

	case 0x74:
		*ptr = (u32 *)&ck_delay_16;
		break;

	case 0x75:
		*ptr = (u32 *)&ca_delay;
		break;

	case 0x100:
		*ptr = (u32 *)&debug_dunit;
		break;

	case 0x101:
		debug_acc = (int)value;
		break;

	case 0x102:
		debug_training = (u8)value;
		break;

	case 0x103:
		debug_training_bist = (u8)value;
		break;

	case 0x104:
		debug_centralization = (u8)value;
		break;

	case 0x105:
		debug_training_ip = (u8)value;
		break;

	case 0x106:
		debug_leveling = (u8)value;
		break;

	case 0x107:
		debug_pbs = (u8)value;
		break;

	case 0x108:
		debug_training_static = (u8)value;
		break;

	case 0x109:
		debug_training_access = (u8)value;
		break;

	case 0x112:
		*ptr = &start_pattern;
		break;

	case 0x113:
		*ptr = &end_pattern;
		break;

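	/*
	 * Flag IDs above 0x1ff index into the topology map: 0x200-0x25f
	 * select per-interface parameters (frequency, speed bin, bus width,
	 * memory size, CAS latencies), 0x270-0x3ef select per-bus parameters
	 * (CK/DQS swap, CS bitmask, mirror bitmask) and 0x500-0x50f select
	 * entries of the clamp table.
	 */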
	default:
		if ((flag_id >= 0x200) && (flag_id < 0x210)) {
			if_id = flag_id - 0x200;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].memory_freq);
		} else if ((flag_id >= 0x210) && (flag_id < 0x220)) {
			if_id = flag_id - 0x210;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].speed_bin_index);
		} else if ((flag_id >= 0x220) && (flag_id < 0x230)) {
			if_id = flag_id - 0x220;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].bus_width);
		} else if ((flag_id >= 0x230) && (flag_id < 0x240)) {
			if_id = flag_id - 0x230;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].memory_size);
		} else if ((flag_id >= 0x240) && (flag_id < 0x250)) {
			if_id = flag_id - 0x240;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].cas_l);
		} else if ((flag_id >= 0x250) && (flag_id < 0x260)) {
			if_id = flag_id - 0x250;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].cas_wl);
		} else if ((flag_id >= 0x270) && (flag_id < 0x2cf)) {
			if_id = (flag_id - 0x270) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x270) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params[if_id].
					as_bus_params[pup_id].is_ck_swap);
		} else if ((flag_id >= 0x2d0) && (flag_id < 0x32f)) {
			if_id = (flag_id - 0x2d0) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x2d0) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params[if_id].
					as_bus_params[pup_id].is_dqs_swap);
		} else if ((flag_id >= 0x330) && (flag_id < 0x38f)) {
			if_id = (flag_id - 0x330) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x330) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params[if_id].
					as_bus_params[pup_id].cs_bitmask);
		} else if ((flag_id >= 0x390) && (flag_id < 0x3ef)) {
			if_id = (flag_id - 0x390) / MAX_BUS_NUM;
			pup_id = (flag_id - 0x390) % MAX_BUS_NUM;
			*ptr = (u32 *)&(tm->interface_params
					[if_id].as_bus_params
					[pup_id].mirror_enable_bitmask);
		} else if ((flag_id >= 0x500) && (flag_id <= 0x50f)) {
			tmp_val = flag_id - 0x500; /* index clamp_tbl[0..15] */
			*ptr = (u32 *)&(clamp_tbl[tmp_val]);
		} else {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("flag_id out of boundary %d\n",
					   flag_id));
			return MV_BAD_PARAM;
		}
	}

	return MV_OK;
}

#ifndef EXCLUDE_SWITCH_DEBUG
/*
 * Print ADLL
 */
int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
{
	u32 i, j;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (j = 0; j < tm->num_of_bus_per_interface; j++) {
		VALIDATE_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++) {
			printf("%d ,",
			       adll[i * tm->num_of_bus_per_interface + j]);
		}
	}
	printf("\n");

	return MV_OK;
}
#endif

/* byte_index - only byte 0, 1, 2, or 3; 0xff - test all bytes */
static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
			    u32 byte_index)
{
	u32 burst_cnt = 0, addr_offset, i_id;
	int b_is_fail = 0;

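	/* Mask selecting either the requested byte lane or the whole word */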
	addr_offset =
		(byte_index ==
		 0xff) ? (u32)0xffffffff : (u32)(0xff << (byte_index * 8));
	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
		if ((p_src[burst_cnt] & addr_offset) !=
		    (p_dst[burst_cnt] & addr_offset))
			b_is_fail = 1;
	}

	if (b_is_fail == 1) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("IF %d exp: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("0x%8x ", p_src[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("\n_i_f %d rcv: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("(0x%8x ", p_dst[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
	}

	return b_is_fail;
}

/* test_type = 0-tx , 1-rx */
int ddr3_tip_sweep_test(u32 dev_num, u32 test_type,
			u32 mem_addr, u32 is_modify_adll,
			u32 start_if, u32 end_if, u32 startpup, u32 endpup)
{
	u32 bus_cnt = 0, adll_val = 0, if_id, ui_prev_adll, ui_mask_bit,
		end_adll, start_adll;
	u32 reg_addr = 0;
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (test_type == 0) {
		reg_addr = 1;
		ui_mask_bit = 0x3f;
		start_adll = 0;
		end_adll = ui_mask_bit;
	} else {
		reg_addr = 3;
		ui_mask_bit = 0x1f;
		start_adll = 0;
		end_adll = ui_mask_bit;
	}

	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
			  ("==============================\n"));
	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
			  ("Test type %d (0-tx, 1-rx)\n", test_type));

	for (if_id = start_if; if_id <= end_if; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = startpup; bus_cnt < endpup; bus_cnt++) {
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, reg_addr,
				      &ui_prev_adll));

			for (adll_val = start_adll; adll_val <= end_adll;
			     adll_val++) {
				if (is_modify_adll == 1) {
					CHECK_STATUS(ddr3_tip_bus_read_modify_write
						     (dev_num,
						      ACCESS_TYPE_UNICAST,
						      if_id, bus_cnt,
						      DDR_PHY_DATA, reg_addr,
						      adll_val, ui_mask_bit));
				}
			}
			if (is_modify_adll == 1) {
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id, ACCESS_TYPE_UNICAST,
					      bus_cnt, DDR_PHY_DATA, reg_addr,
					      ui_prev_adll));
			}
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("\n"));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("\n"));
	}

	return MV_OK;
}

#ifndef EXCLUDE_SWITCH_DEBUG
/*
 * Sweep validation
 */
int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
			    u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0;
	u32 adll = 0;
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int if_id = 0;
	u32 adll_value = 0;
	int reg = (direction == 0) ? WRITE_CENTRALIZATION_PHY_REG :
		READ_CENTRALIZATION_PHY_REG;
	enum hws_access_type pup_access;
	u32 cs;
	u32 max_cs = hws_ddr3_tip_max_cs_get();
	struct hws_topology_map *tm = ddr3_get_topology_map();

	if (mode == 1) {
		/* per pup */
		start_pup = 0;
		end_pup = tm->num_of_bus_per_interface - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					ctrl_sweepres[adll][if_id][pup] = 0;
			}
		}

		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
			ctrl_adll[adll] = 0;
		/* Save DQS value(after algorithm run) */
		read_adll_value(ctrl_adll,
				(reg + (cs * CS_REGISTER_ADDR_OFFSET)),
				MASK_ALL_BITS);

		/*
		 * Sweep ADLL from 0:31 on all I/F on all Pup and perform
		 * BIST on each stage.
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				adll_value =
					(direction == 0) ? (adll * 2) : adll;
				CHECK_STATUS(ddr3_tip_bus_write
					     (dev_num, ACCESS_TYPE_MULTICAST, 0,
					      pup_access, pup, DDR_PHY_DATA,
					      reg + CS_REG_VALUE(cs),
					      adll_value));
				hws_ddr3_run_bist(dev_num, sweep_pattern, res,
						  cs);
				/* ddr3_tip_reset_fifo_ptr(dev_num); */
				for (if_id = 0;
				     if_id <= MAX_INTERFACE_NUM - 1;
				     if_id++) {
					VALIDATE_ACTIVE(tm->if_act_mask,
							if_id);
					ctrl_sweepres[adll][if_id][pup]
						= res[if_id];
					if (mode == 1) {
						CHECK_STATUS
							(ddr3_tip_bus_write
							 (dev_num,
							  ACCESS_TYPE_UNICAST,
							  if_id,
							  ACCESS_TYPE_UNICAST,
							  pup,
							  DDR_PHY_DATA,
							  reg + CS_REG_VALUE(cs),
							  ctrl_adll[if_id *
								    cs *
								    tm->num_of_bus_per_interface
								    + pup]));
					}
				}
			}
		}
		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
		       ((direction == 0) ? "TX" : "RX"));
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? (adll * 2) : adll;
			printf("Final,%s, Sweep, Result, %d ,",
			       ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					printf("%d , ",
					       ctrl_sweepres[adll][if_id]
					       [pup]);
				}
			}
			printf("\n");
		}

		/*
		 * Write back to the phy the Rx DQS value, we store in
		 * the beginning.
		 */
		write_adll_value(ctrl_adll,
				 (reg + cs * CS_REGISTER_ADDR_OFFSET));
		/* print adll results */
		read_adll_value(ctrl_adll, (reg + cs * CS_REGISTER_ADDR_OFFSET),
				MASK_ALL_BITS);
		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}

void print_topology(struct hws_topology_map *topology_db)
{
	u32 ui, uj;

	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
	printf("\tNum Bus: %d\n", topology_db->num_of_bus_per_interface);
	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);

	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
		VALIDATE_ACTIVE(topology_db->if_act_mask, ui);
		printf("\n\tInterface ID: %d\n", ui);
		printf("\t\tDDR Frequency: %s\n",
		       convert_freq(topology_db->
				    interface_params[ui].memory_freq));
		printf("\t\tSpeed_bin: %d\n",
		       topology_db->interface_params[ui].speed_bin_index);
		printf("\t\tBus_width: %d\n",
		       (4 << topology_db->interface_params[ui].bus_width));
		printf("\t\tMem_size: %s\n",
		       convert_mem_size(topology_db->
					interface_params[ui].memory_size));
		printf("\t\tCAS-WL: %d\n",
		       topology_db->interface_params[ui].cas_wl);
		printf("\t\tCAS-L: %d\n",
		       topology_db->interface_params[ui].cas_l);
		printf("\t\tTemperature: %d\n",
		       topology_db->interface_params[ui].interface_temp);
		printf("\n");
		for (uj = 0; uj < 4; uj++) {
			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
			       topology_db->interface_params[ui].
			       as_bus_params[uj].cs_bitmask);
			printf("Mirror: 0x%x\t",
			       topology_db->interface_params[ui].
			       as_bus_params[uj].mirror_enable_bitmask);
			printf("DQS Swap is %s \t",
			       (topology_db->
				interface_params[ui].as_bus_params[uj].
				is_dqs_swap == 1) ? "enabled" : "disabled");
			printf("Ck Swap:%s\t",
			       (topology_db->
				interface_params[ui].as_bus_params[uj].
				is_ck_swap == 1) ? "enabled" : "disabled");
			printf("\n");
		}
	}
}
#endif

/*
 * Execute XSB Test transaction (rd/wr/both)
 */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	u32 seq = 0, if_id = 0, addr, cnt;
	int ret = MV_OK, ret_tmp;
	u32 data_read[MAX_INTERFACE_NUM];
	struct hws_topology_map *tm = ddr3_get_topology_map();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_ACTIVE(tm->if_act_mask, if_id);
		addr = mem_addr;
		for (cnt = 0; cnt <= burst_length; cnt++) {
			seq = (seq + 1) % 8;
			if (write_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_write
					     (dev_num, if_id, addr, 1,
					      xsb_test_table[seq]));
			}
			if (read_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_read
					     (dev_num, if_id, addr, 1,
					      data_read));
			}
			if ((read_type != 0) && (write_type != 0)) {
				ret_tmp =
					ddr3_tip_compare(if_id,
							 xsb_test_table[seq],
							 data_read,
							 0xff);
				addr += (EXT_ACCESS_BURST_LENGTH * 4);
				ret = (ret != MV_OK) ? ret : ret_tmp;
			}
		}
	}

	return ret;
}

#else /* EXCLUDE_SWITCH_DEBUG */

u32 rl_version = 1;	/* 0 - old RL machine */
u32 vref = 0x4;
u32 start_xsb_offset = 0;
u8 cs_mask_reg[] = {
	0, 4, 8, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};

int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	return MV_OK;
}

#endif