// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"
#include "mv_ddr_training_db.h"
#include "mv_ddr_regs.h"

u8 is_reg_dump = 0;
u8 debug_pbs = DEBUG_LEVEL_ERROR;

/*
 * API to change flags outside of the lib
 */
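/*
 * Illustrative usage only (not part of the original sources): a board or
 * debug hook built without SILENT_LIB might raise the verbosity of a single
 * training stage like this:
 *
 *	ddr3_hws_set_log_level(DEBUG_BLOCK_TRAINING_MAIN, DEBUG_LEVEL_TRACE);
 *
 * or take the level from the topology map filled in by the platform code:
 *
 *	mv_ddr_user_log_level_set(DEBUG_BLOCK_ALL);
 */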
#if defined(SILENT_LIB)
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	/* do nothing */
}
#else /* SILENT_LIB */
/* Debug flags for other Training modules */
u8 debug_training_static = DEBUG_LEVEL_ERROR;
u8 debug_training = DEBUG_LEVEL_ERROR;
u8 debug_leveling = DEBUG_LEVEL_ERROR;
u8 debug_centralization = DEBUG_LEVEL_ERROR;
u8 debug_training_ip = DEBUG_LEVEL_ERROR;
u8 debug_training_bist = DEBUG_LEVEL_ERROR;
u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
u8 debug_training_access = DEBUG_LEVEL_ERROR;
u8 debug_training_device = DEBUG_LEVEL_ERROR;

#if defined(CONFIG_DDR4)
u8 debug_tap_tuning = DEBUG_LEVEL_ERROR;
u8 debug_calibration = DEBUG_LEVEL_ERROR;
u8 debug_ddr4_centralization = DEBUG_LEVEL_ERROR;
u8 debug_dm_tuning = DEBUG_LEVEL_ERROR;
#endif /* CONFIG_DDR4 */

void mv_ddr_user_log_level_set(enum ddr_lib_debug_block block)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	ddr3_hws_set_log_level(block, tm->debug_level);
};

void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	switch (block) {
	case DEBUG_BLOCK_STATIC:
		debug_training_static = level;
		break;
	case DEBUG_BLOCK_TRAINING_MAIN:
		debug_training = level;
		break;
	case DEBUG_BLOCK_LEVELING:
		debug_leveling = level;
		break;
	case DEBUG_BLOCK_CENTRALIZATION:
		debug_centralization = level;
		break;
	case DEBUG_BLOCK_PBS:
		debug_pbs = level;
		break;
	case DEBUG_BLOCK_ALG:
		debug_training_hw_alg = level;
		break;
	case DEBUG_BLOCK_DEVICE:
		debug_training_device = level;
		break;
	case DEBUG_BLOCK_ACCESS:
		debug_training_access = level;
		break;
	case DEBUG_STAGES_REG_DUMP:
		if (level == DEBUG_LEVEL_TRACE)
			is_reg_dump = 1;
		else
			is_reg_dump = 0;
		break;
#if defined(CONFIG_DDR4)
	case DEBUG_TAP_TUNING_ENGINE:
		debug_tap_tuning = level;
		break;
	case DEBUG_BLOCK_CALIBRATION:
		debug_calibration = level;
		break;
	case DEBUG_BLOCK_DDR4_CENTRALIZATION:
		debug_ddr4_centralization = level;
		break;
#endif /* CONFIG_DDR4 */
	case DEBUG_BLOCK_ALL:
	default:
		debug_training_static = level;
		debug_training = level;
		debug_leveling = level;
		debug_centralization = level;
		debug_pbs = level;
		debug_training_hw_alg = level;
		debug_training_access = level;
		debug_training_device = level;
#if defined(CONFIG_DDR4)
		debug_tap_tuning = level;
		debug_calibration = level;
		debug_ddr4_centralization = level;
#endif /* CONFIG_DDR4 */
	}
}
#endif /* SILENT_LIB */

#if defined(DDR_VIEWER_TOOL)
static char *convert_freq(enum mv_ddr_freq freq);
#if defined(EXCLUDE_SWITCH_DEBUG)
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
u32 ctrl_adll1[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
u32 ctrl_level_phase[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
#endif /* EXCLUDE_SWITCH_DEBUG */
#endif /* DDR_VIEWER_TOOL */

struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
u8 is_default_centralization = 0;
u8 is_tune_result = 0;
u8 is_validate_window_per_if = 0;
u8 is_validate_window_per_pup = 0;
u8 sweep_cnt = 1;
u32 is_bist_reset_bit = 1;
u8 is_run_leveling_sweep_tests;

static struct hws_xsb_info xsb_info[MAX_DEVICE_NUM];

/*
 * Dump Dunit & Phy registers
 */
int ddr3_tip_reg_dump(u32 dev_num)
{
	u32 if_id, reg_addr, data_value, bus_id;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	printf("-- dunit registers --\n");
	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, reg_addr, read_data,
				      MASK_ALL_BITS));
			printf("0x%x ", read_data[if_id]);
		}
		printf("\n");
	}

	printf("-- Phy registers --\n");
	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			for (bus_id = 0;
			     bus_id < octets_per_if_num;
			     bus_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_DATA, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
			for (bus_id = 0;
			     bus_id < octets_per_if_num;
			     bus_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_CONTROL, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
		}
		printf("\n");
	}

	return MV_OK;
}

/*
 * Register access func registration
 */
int ddr3_tip_init_config_func(u32 dev_num,
			      struct hws_tip_config_func_db *config_func)
{
	if (config_func == NULL)
		return MV_BAD_PARAM;

	memcpy(&config_func_info[dev_num], config_func,
	       sizeof(struct hws_tip_config_func_db));

	return MV_OK;
}
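
/*
 * Sketch of how a platform could register its callbacks (illustrative only;
 * the two field names below are the ones this file actually dereferences,
 * while helpers such as my_get_device_info()/my_get_temperature() are
 * hypothetical):
 *
 *	struct hws_tip_config_func_db funcs = {
 *		.tip_get_device_info_func = my_get_device_info,
 *		.tip_get_temperature = my_get_temperature,
 *	};
 *	CHECK_STATUS(ddr3_tip_init_config_func(0, &funcs));
 */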

/*
 * Get training result info pointer
 */
enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
{
	return training_result[stage];
}

/*
 * Device info read
 */
int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
{
	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
		return config_func_info[dev_num].
			tip_get_device_info_func((u8) dev_num, info_ptr);
	}

	return MV_FAIL;
}

#if defined(DDR_VIEWER_TOOL)
/*
 * Convert freq to character string
 */
static char *convert_freq(enum mv_ddr_freq freq)
{
	switch (freq) {
	case MV_DDR_FREQ_LOW_FREQ:
		return "MV_DDR_FREQ_LOW_FREQ";

#if !defined(CONFIG_DDR4)
	case MV_DDR_FREQ_400:
		return "400";

	case MV_DDR_FREQ_533:
		return "533";
#endif /* CONFIG_DDR4 */

	case MV_DDR_FREQ_667:
		return "667";

	case MV_DDR_FREQ_800:
		return "800";

	case MV_DDR_FREQ_933:
		return "933";

	case MV_DDR_FREQ_1066:
		return "1066";

#if !defined(CONFIG_DDR4)
	case MV_DDR_FREQ_311:
		return "311";

	case MV_DDR_FREQ_333:
		return "333";

	case MV_DDR_FREQ_467:
		return "467";

	case MV_DDR_FREQ_850:
		return "850";

	case MV_DDR_FREQ_900:
		return "900";

	case MV_DDR_FREQ_360:
		return "MV_DDR_FREQ_360";

	case MV_DDR_FREQ_1000:
		return "MV_DDR_FREQ_1000";
#endif /* CONFIG_DDR4 */

	default:
		return "Unknown Frequency";
	}
}

/*
 * Convert device ID to character string
 */
static char *convert_dev_id(u32 dev_id)
{
	switch (dev_id) {
	case 0x6800:
		return "A38xx";
	case 0x6900:
		return "A39XX";
	case 0xf400:
		return "AC3";
	case 0xfc00:
		return "BC2";

	default:
		return "Unknown Device";
	}
}

/*
 * Convert memory size to character string
 */
static char *convert_mem_size(u32 dev_id)
{
	switch (dev_id) {
	case 0:
		return "512 MB";
	case 1:
		return "1 GB";
	case 2:
		return "2 GB";
	case 3:
		return "4 GB";
	case 4:
		return "8 GB";

	default:
		return "wrong mem size";
	}
}

int print_device_info(u8 dev_num)
{
	struct ddr3_device_info info_ptr;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
	printf("=== DDR setup START===\n");
	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
	printf("\tDDR3 CK delay: %d\n", info_ptr.ck_delay);
	print_topology(tm);
	printf("=== DDR setup END===\n");

	return MV_OK;
}

void hws_ddr3_tip_sweep_test(int enable)
{
	if (enable) {
		is_validate_window_per_if = 1;
		is_validate_window_per_pup = 1;
		debug_training = DEBUG_LEVEL_TRACE;
	} else {
		is_validate_window_per_if = 0;
		is_validate_window_per_pup = 0;
	}
}
#endif /* DDR_VIEWER_TOOL */

char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
{
	switch (tune_result) {
	case TEST_FAILED:
		return "FAILED";
	case TEST_SUCCESS:
		return "PASS";
	case NO_TEST_DONE:
		return "NOT COMPLETED";
	default:
		return "Un-KNOWN";
	}
}

/*
 * Print log info
 */
int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
{
	u32 if_id = 0;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

#if defined(DDR_VIEWER_TOOL)
	if ((is_validate_window_per_if != 0) ||
	    (is_validate_window_per_pup != 0)) {
		u32 is_pup_log = 0;
		enum mv_ddr_freq freq;

		freq = tm->interface_params[first_active_if].memory_freq;

		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
		printf("===VALIDATE WINDOW LOG START===\n");
		printf("DDR Frequency: %s ======\n", convert_freq(freq));
		/* print sweep windows */
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
#if defined(EXCLUDE_SWITCH_DEBUG)
		if (is_run_leveling_sweep_tests == 1) {
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		}
#endif /* EXCLUDE_SWITCH_DEBUG */
		ddr3_tip_print_all_pbs_result(dev_num);
		ddr3_tip_print_wl_supp_result(dev_num);
		printf("===VALIDATE WINDOW LOG END ===\n");
		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
		ddr3_tip_reg_dump(dev_num);
	}
#endif /* DDR_VIEWER_TOOL */

	/* return early if we won't print anything anyway */
	if (
#if defined(SILENT_LIB)
	    1 ||
#endif
	    debug_training < DEBUG_LEVEL_INFO) {
		return MV_OK;
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("IF %d Status:\n", if_id));

		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tInit Controller: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[INIT_CONTROLLER]
					    [if_id])));
		}
		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLow freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_LOW_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN]
					    [if_id])));
		}
		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tMedium freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_MEDIUM_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN_2]
					    [if_id])));
		}
		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_SUPP]
					    [if_id])));
		}
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_RX]
					    [if_id])));
		}
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_TX]
					    [if_id])));
		}
		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tTarget freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_TARGET_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_TF]
					    [if_id])));
		}
#if !defined(CONFIG_DDR4)
		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING_TF]
					    [if_id])));
		}
#endif /* CONFIG_DDR4 */
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result
					    [WRITE_LEVELING_SUPP_TF]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_RX]
					    [if_id])));
		}
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tVREF_CALIBRATION: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_TX]
					    [if_id])));
		}
#if defined(CONFIG_DDR4)
		if (mask_tune_func & SW_READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tSW RL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SW_READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & RECEIVER_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRX CAL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[RECEIVER_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & WL_PHASE_CORRECTION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL PHASE CORRECT: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WL_PHASE_CORRECTION]
					    [if_id])));
		}
		if (mask_tune_func & DQ_VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tDQ VREF CAL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[DQ_VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & DQ_MAPPING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tDQ MAP: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[DQ_MAPPING]
					    [if_id])));
		}
#endif /* CONFIG_DDR4 */
	}

	return MV_OK;
}

#if !defined(EXCLUDE_DEBUG_PRINTS)
/*
 * Print stability log info
 */
int ddr3_tip_print_stability_log(u32 dev_num)
{
	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
	u32 reg_data;
#if defined(CONFIG_DDR4)
	u32 reg_data1;
#endif /* CONFIG_DDR4 */
	u32 read_data[MAX_INTERFACE_NUM];
	unsigned int max_cs = mv_ddr_cs_num_get();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* Title print */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			printf("\n");
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
#if defined(CONFIG_DDR4)
			printf("DminTx, AreaTx, DminRx, AreaRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, CenTx, CenRx, Vref, DQVref,");
			for (idx = 0; idx < 11; idx++)
				printf("DC-Pad%d,", idx);
#else /* CONFIG_DDR4 */
			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
#endif /* CONFIG_DDR4 */
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSTx-Pad%d,", idx);
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSRx-Pad%d,", idx);
		}
	}
	printf("\n");

	/* Data print */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		printf("Data: %d,%d,", if_id,
		       (config_func_info[dev_num].tip_get_temperature != NULL)
		       ? (config_func_info[dev_num].
			  tip_get_temperature(dev_num)) : (0));

		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
		       ((read_data[if_id] & 0xfc00000) >> 22));

		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
				printf("\n");
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
#if defined(CONFIG_DDR4)
				/* DminTx, areaTX */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_PHY_REG +
						  csindex, &reg_data);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][0],
						  DDR_PHY_CONTROL,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][1],
						  &reg_data1);
				printf("%d,%d,", 2 * (reg_data1 & 0xFF),
				       reg_data);
				/* DminRx, areaRX */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_PHY_REG +
						  csindex + 4, &reg_data);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][0],
						  DDR_PHY_CONTROL,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][1],
						  &reg_data1);
				printf("%d,%d,", 2 * (reg_data1 >> 8),
				       reg_data);
#else /* CONFIG_DDR4 */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_PHY_REG +
						  csindex, &reg_data);
				printf("%d,%d,", (reg_data & 0x1f),
				       ((reg_data & 0x3e0) >> 5));
#endif /* CONFIG_DDR4 */
				/* WL */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  WL_PHY_REG(csindex),
						  &reg_data);
				printf("%d,%d,%d,",
				       (reg_data & 0x1f) +
				       ((reg_data & 0x1c0) >> 6) * 32,
				       (reg_data & 0x1f),
				       (reg_data & 0x1c0) >> 6);
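				/*
				 * Note (added for readability): the WL/RL PHY
				 * registers encode the delay as ADLL taps in
				 * bits [4:0] and a phase in bits [8:6]; the
				 * first column printed above is the combined
				 * value, ADLL + 32 * phase. The RL print below
				 * additionally adds 64 * the read sample delay
				 * taken from RD_DATA_SMPL_DLYS_REG.
				 */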
				/* RL */
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id,
					      RD_DATA_SMPL_DLYS_REG,
					      read_data, MASK_ALL_BITS));
				read_data[if_id] =
					(read_data[if_id] &
					 (0x1f << (8 * csindex))) >>
					(8 * csindex);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  RL_PHY_REG(csindex),
						  &reg_data);
				printf("%d,%d,%d,%d,",
				       (reg_data & 0x1f) +
				       ((reg_data & 0x1c0) >> 6) * 32 +
				       read_data[if_id] * 64,
				       (reg_data & 0x1f),
				       ((reg_data & 0x1c0) >> 6),
				       read_data[if_id]);
				/* Centralization */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  CTX_PHY_REG(csindex),
						  &reg_data);
				printf("%d,", (reg_data & 0x3f));
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  CRX_PHY_REG(csindex),
						  &reg_data);
				printf("%d,", (reg_data & 0x1f));
				/* Vref */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  PAD_CFG_PHY_REG,
						  &reg_data);
				printf("%d,", (reg_data & 0x7));
				/* DQVref */
				/* Need to add the Read Function from device */
				printf("%d,", 0);
#if defined(CONFIG_DDR4)
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0xd0 + 12 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
#endif /* CONFIG_DDR4 */
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0x10 +
							  16 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0x50 +
							  16 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
			}
		}
	}
	printf("\n");

	return MV_OK;
}
#endif /* EXCLUDE_DEBUG_PRINTS */

/*
 * Register XSB information
 */
int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
{
	memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
	return MV_OK;
}

/*
 * Read ADLL Value
 */
int ddr3_tip_read_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			     u32 reg_addr, u32 mask)
{
	u32 data_value;
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 * with CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       ACCESS_TYPE_UNICAST,
						       bus_id,
						       DDR_PHY_DATA, reg_addr,
						       &data_value));
			pup_values[if_id *
				   octets_per_if_num + bus_id] =
				data_value & mask;
		}
	}

	return 0;
}
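
/*
 * Illustrative caller (mirrors ddr3_tip_run_sweep_test() below, not part of
 * the original sources): the CS offset is folded into the register address
 * before the call, e.g.
 *
 *	u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM];
 *
 *	ddr3_tip_read_adll_value(dev_num, adll, CTX_PHY_REG(cs), MASK_ALL_BITS);
 */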

/*
 * Write ADLL Value
 */
int ddr3_tip_write_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			      u32 reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 data;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 * with CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id *
					  octets_per_if_num +
					  bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id, DDR_PHY_DATA,
							reg_addr, data));
		}
	}

	return 0;
}

/**
 * Read Phase Value
 */
int read_phase_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
		     int reg_addr, u32 mask)
{
	u32 data_value;
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* multi CS support - reg_addr is calculated in calling function with CS offset */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       ACCESS_TYPE_UNICAST,
						       bus_id,
						       DDR_PHY_DATA, reg_addr,
						       &data_value));
			pup_values[if_id * octets_per_if_num + bus_id] = data_value & mask;
		}
	}

	return 0;
}

/**
 * Write Leveling Value
 */
int write_leveling_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			 u32 pup_ph_values[MAX_INTERFACE_NUM * MAX_BUS_NUM], int reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 data;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* multi CS support - reg_addr is calculated in calling function with CS offset */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0 ; bus_id < octets_per_if_num ; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id * octets_per_if_num + bus_id] +
			       pup_ph_values[if_id * octets_per_if_num + bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id,
							DDR_PHY_DATA,
							reg_addr,
							data));
		}
	}

	return 0;
}

#if !defined(EXCLUDE_SWITCH_DEBUG)
struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
u32 start_xsb_offset = 0;
u8 is_rl_old = 0;
u8 is_freq_old = 0;
u8 is_dfs_disabled = 0;
u32 default_centrlization_value = 0x12;
u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
	rl_test = 0, reset_read_fifo = 0;
int debug_acc = 0;
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];

u32 xsb_test_table[][8] = {
	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
	 0x66666666, 0x77777777},
	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
	 0xeeeeeeee, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
	 0xffffffff, 0xffffffff},
	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
	 0x00000000, 0x00000000},
	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
	 0xffffffff, 0xffffffff}
};

int ddr3_tip_print_adll(void)
{
	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < octets_per_if_num;
		     bus_cnt++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id,
				      ACCESS_TYPE_UNICAST, bus_cnt,
				      DDR_PHY_DATA, 0x1, &data_p1));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  (" IF %d bus_cnt %d phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
					   if_id, bus_cnt, data_p1, data_p2,
					   ui_data3));
		}
	}

	return MV_OK;
}

#endif /* EXCLUDE_SWITCH_DEBUG */

#if defined(DDR_VIEWER_TOOL)
/*
 * Print ADLL
 */
int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
{
	u32 i, j;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (j = 0; j < octets_per_if_num; j++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++)
			printf("%d ,", adll[i * octets_per_if_num + j]);
	}
	printf("\n");

	return MV_OK;
}

int print_ph(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
{
	u32 i, j;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (j = 0; j < octets_per_if_num; j++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++)
			printf("%d ,", adll[i * octets_per_if_num + j] >> 6);
	}
	printf("\n");

	return MV_OK;
}
#endif /* DDR_VIEWER_TOOL */

#if !defined(EXCLUDE_SWITCH_DEBUG)
/* byte_index - only byte 0, 1, 2, or 3, 0xff - test all bytes */
static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
			    u32 byte_index)
{
	u32 burst_cnt = 0, addr_offset, i_id;
	int b_is_fail = 0;

	addr_offset =
		(byte_index ==
		 0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
		if ((p_src[burst_cnt] & addr_offset) !=
		    (p_dst[if_id] & addr_offset))
			b_is_fail = 1;
	}

	if (b_is_fail == 1) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("IF %d exp: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("0x%8x ", p_src[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("\n_i_f %d rcv: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("(0x%8x ", p_dst[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
	}

	return b_is_fail;
}
#endif /* EXCLUDE_SWITCH_DEBUG */

#if defined(DDR_VIEWER_TOOL)
/*
 * Sweep validation
 */
int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
			    u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int if_id = 0;
	u32 adll_value = 0;
	u32 reg;
	enum hws_access_type pup_access;
	u32 cs;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	repeat_num = 2;

	if (mode == 1) {
		/* per pup */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
		reg = (direction == 0) ? CTX_PHY_REG(cs) : CRX_PHY_REG(cs);
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE
					(tm->if_act_mask,
					 if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					ctrl_sweepres[adll][if_id][pup] =
						0;
				}
			}
		}

		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
			ctrl_adll[adll] = 0;
		/* Save DQS value(after algorithm run) */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll,
					 reg, MASK_ALL_BITS);

		/*
		 * Sweep ADLL from 0:31 on all I/F on all Pup and perform
		 * BIST on each stage.
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
					     pattern_idx++) {
						adll_value =
							(direction == 0) ? (adll * 2) : adll;
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num, ACCESS_TYPE_MULTICAST, 0,
							      pup_access, pup, DDR_PHY_DATA,
							      reg, adll_value));
						hws_ddr3_run_bist(dev_num, sweep_pattern, res,
								  cs);
						/* ddr3_tip_reset_fifo_ptr(dev_num); */
						for (if_id = 0;
						     if_id < MAX_INTERFACE_NUM;
						     if_id++) {
							VALIDATE_IF_ACTIVE
								(tm->if_act_mask,
								 if_id);
							ctrl_sweepres[adll][if_id][pup]
								+= res[if_id];
							if (mode == 1) {
								CHECK_STATUS
									(ddr3_tip_bus_write
									 (dev_num,
									  ACCESS_TYPE_UNICAST,
									  if_id,
									  ACCESS_TYPE_UNICAST,
									  pup,
									  DDR_PHY_DATA,
									  reg,
									  ctrl_adll[if_id *
										    cs *
										    octets_per_if_num
										    + pup]));
							}
						}
					}
				}
			}
		}
		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
		       ((direction == 0) ? "TX" : "RX"));
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? (adll * 2) : adll;
			printf("Final,%s, Sweep, Result, %d ,",
			       ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					printf("%8d , ",
					       ctrl_sweepres[adll][if_id]
					       [pup]);
				}
			}
			printf("\n");
		}

		/*
		 * Write back to the PHY the Rx DQS value we stored at
		 * the beginning.
		 */
		ddr3_tip_write_adll_value(dev_num, ctrl_adll, reg);
		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}

#if defined(EXCLUDE_SWITCH_DEBUG)
int ddr3_tip_run_leveling_sweep_test(int dev_num, u32 repeat_num,
				     u32 direction, u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0, start_adll = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int if_id = 0, gap = 0;
	u32 adll_value = 0;
	u32 reg;
	enum hws_access_type pup_access;
	u32 cs;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (mode == 1) { /* per pup */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
		reg = (direction == 0) ? WL_PHY_REG(cs) : RL_PHY_REG(cs);
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					ctrl_sweepres[adll][if_id][pup] = 0;
			}
		}

		for (adll = 0; adll < MAX_INTERFACE_NUM * MAX_BUS_NUM; adll++) {
			ctrl_adll[adll] = 0;
			ctrl_level_phase[adll] = 0;
			ctrl_adll1[adll] = 0;
		}

		/* save leveling value after running algorithm */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, 0x1f);
		read_phase_value(dev_num, ctrl_level_phase, reg, 0x7 << 6);

		if (direction == 0)
			ddr3_tip_read_adll_value(dev_num, ctrl_adll1,
						 CTX_PHY_REG(cs), MASK_ALL_BITS);

		/* Sweep ADLL from 0 to 31 on all interfaces, all pups,
		 * and perform BIST on each stage
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					adll_value = (direction == 0) ? (adll * 2) : (adll * 3);
					for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
						start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
							     (ctrl_level_phase[if_id * cs *
									       octets_per_if_num +
									       pup] >> 6) * 32;

						if (direction == 0)
							start_adll = (start_adll > 32) ? (start_adll - 32) : 0;
						else
							start_adll = (start_adll > 48) ? (start_adll - 48) : 0;

						adll_value += start_adll;

						gap = ctrl_adll1[if_id * cs * octets_per_if_num + pup] -
						      ctrl_adll[if_id * cs * octets_per_if_num + pup];
						gap = (((adll_value % 32) + gap) % 64);

						adll_value = ((adll_value % 32) +
							      (((adll_value - (adll_value % 32)) / 32) << 6));

						CHECK_STATUS(ddr3_tip_bus_write(dev_num,
										ACCESS_TYPE_UNICAST,
										if_id,
										pup_access,
										pup,
										DDR_PHY_DATA,
										reg,
										adll_value));
						if (direction == 0)
							CHECK_STATUS(ddr3_tip_bus_write(dev_num,
											ACCESS_TYPE_UNICAST,
											if_id,
											pup_access,
											pup,
											DDR_PHY_DATA,
											CTX_PHY_REG(cs),
											gap));
					}

					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
					     pattern_idx++) {
						hws_ddr3_run_bist(dev_num, sweep_pattern, res, cs);
						ddr3_tip_reset_fifo_ptr(dev_num);
						for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
							VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
							if (pup != 4) { /* TODO: remove literal */
								ctrl_sweepres[adll][if_id][pup] += res[if_id];
							} else {
								CHECK_STATUS(ddr3_tip_if_read(dev_num,
											      ACCESS_TYPE_UNICAST,
											      if_id,
											      0x1458,
											      read_data,
											      MASK_ALL_BITS));
								ctrl_sweepres[adll][if_id][pup] += read_data[if_id];
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
											       if_id,
											       0x1458,
											       0x0,
											       0xFFFFFFFF));
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
											       if_id,
											       0x145C,
											       0x0,
											       0xFFFFFFFF));
							}
						}
					}
				}
			}

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
					     ctrl_level_phase[if_id * cs * octets_per_if_num + pup];
				CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, pup_access, pup,
								DDR_PHY_DATA, reg, start_adll));
				if (direction == 0)
					CHECK_STATUS(ddr3_tip_bus_write(dev_num,
									ACCESS_TYPE_UNICAST,
									if_id,
									pup_access,
									pup,
									DDR_PHY_DATA,
									CTX_PHY_REG(cs),
									ctrl_adll1[if_id *
										   cs *
										   octets_per_if_num +
										   pup]));
			}
		}

		printf("Final,CS %d,%s,Leveling,Result,Adll,", cs, ((direction == 0) ? "TX" : "RX"));

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? ((adll * 2) - 32) : ((adll * 3) - 48);
			printf("Final,%s,LevelingSweep,Result, %d ,", ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					printf("%8d , ", ctrl_sweepres[adll][if_id][pup]);
			}
			printf("\n");
		}

		/* write back to the PHY the Rx DQS value we stored at the beginning */
		write_leveling_value(dev_num, ctrl_adll, ctrl_level_phase, reg);
		if (direction == 0)
			ddr3_tip_write_adll_value(dev_num, ctrl_adll1, CTX_PHY_REG(cs));

		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s,DQS,Leveling,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
		print_ph(dev_num, ctrl_level_phase);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}
#endif /* EXCLUDE_SWITCH_DEBUG */

void print_topology(struct mv_ddr_topology_map *topology_db)
{
	u32 ui, uj;
	u32 dev_num = 0;

	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
	printf("\tNumber of buses: 0x%x\n",
	       ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE));
	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);

	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
		VALIDATE_IF_ACTIVE(topology_db->if_act_mask, ui);
		printf("\n\tInterface ID: %d\n", ui);
		printf("\t\tDDR Frequency: %s\n",
		       convert_freq(topology_db->
				    interface_params[ui].memory_freq));
		printf("\t\tSpeed_bin: %d\n",
		       topology_db->interface_params[ui].speed_bin_index);
		printf("\t\tBus_width: %d\n",
		       (4 << topology_db->interface_params[ui].bus_width));
		printf("\t\tMem_size: %s\n",
		       convert_mem_size(topology_db->
					interface_params[ui].memory_size));
		printf("\t\tCAS-WL: %d\n",
		       topology_db->interface_params[ui].cas_wl);
		printf("\t\tCAS-L: %d\n",
		       topology_db->interface_params[ui].cas_l);
		printf("\t\tTemperature: %d\n",
		       topology_db->interface_params[ui].interface_temp);
		printf("\n");
		for (uj = 0; uj < 4; uj++) {
			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
			       topology_db->interface_params[ui].
			       as_bus_params[uj].cs_bitmask);
			printf("Mirror: 0x%x\t",
			       topology_db->interface_params[ui].
			       as_bus_params[uj].mirror_enable_bitmask);
			printf("DQS Swap is %s \t",
			       (topology_db->
				interface_params[ui].as_bus_params[uj].
				is_dqs_swap == 1) ? "enabled" : "disabled");
			printf("Ck Swap:%s\t",
			       (topology_db->
				interface_params[ui].as_bus_params[uj].
				is_ck_swap == 1) ? "enabled" : "disabled");
			printf("\n");
		}
	}
}
#endif /* DDR_VIEWER_TOOL */

#if !defined(EXCLUDE_SWITCH_DEBUG)
/*
 * Execute XSB Test transaction (rd/wr/both)
 */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	u32 seq = 0, if_id = 0, addr, cnt;
	int ret = MV_OK, ret_tmp;
	u32 data_read[MAX_INTERFACE_NUM];
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		addr = mem_addr;
		for (cnt = 0; cnt <= burst_length; cnt++) {
			seq = (seq + 1) % 8;
			if (write_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_write
					     (dev_num, if_id, addr, 1,
					      xsb_test_table[seq]));
			}
			if (read_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_read
					     (dev_num, if_id, addr, 1,
					      data_read));
			}
			if ((read_type != 0) && (write_type != 0)) {
				ret_tmp =
					ddr3_tip_compare(if_id,
							 xsb_test_table[seq],
							 data_read,
							 0xff);
				addr += (EXT_ACCESS_BURST_LENGTH * 4);
				ret = (ret != MV_OK) ? ret : ret_tmp;
			}
		}
	}

	return ret;
}

#else /*EXCLUDE_SWITCH_DEBUG */
u32 start_xsb_offset = 0;

int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	return MV_OK;
}

#endif /* EXCLUDE_SWITCH_DEBUG */