blob: c659ae92d80fb7cd3dae2995ae15307b956c6d99 [file] [log] [blame]
Tom Rini10e47792018-05-06 17:58:06 -04001// SPDX-License-Identifier: GPL-2.0
Stefan Roese5ffceb82015-03-26 15:36:56 +01002/*
3 * Copyright (C) Marvell International Ltd. and its affiliates
Stefan Roese5ffceb82015-03-26 15:36:56 +01004 */
5
Stefan Roese5ffceb82015-03-26 15:36:56 +01006#include "ddr3_init.h"
Chris Packham4bf81db2018-12-03 14:26:49 +13007#include "mv_ddr_training_db.h"
8#include "mv_ddr_regs.h"
Stefan Roese5ffceb82015-03-26 15:36:56 +01009
u8 is_reg_dump = 0;		/* 1 = dump Dunit/PHY regs after training stages */
u8 debug_pbs = DEBUG_LEVEL_ERROR;	/* PBS module log level */
12
13/*
14 * API to change flags outside of the lib
15 */
Chris Packham1a07d212018-05-10 13:28:29 +120016#if defined(SILENT_LIB)
/* Stub: with SILENT_LIB defined, all debug output is compiled out */
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	/* do nothing - log levels are fixed at build time */
}
21#else /* SILENT_LIB */
Stefan Roese5ffceb82015-03-26 15:36:56 +010022/* Debug flags for other Training modules */
23u8 debug_training_static = DEBUG_LEVEL_ERROR;
24u8 debug_training = DEBUG_LEVEL_ERROR;
25u8 debug_leveling = DEBUG_LEVEL_ERROR;
26u8 debug_centralization = DEBUG_LEVEL_ERROR;
27u8 debug_training_ip = DEBUG_LEVEL_ERROR;
28u8 debug_training_bist = DEBUG_LEVEL_ERROR;
29u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
30u8 debug_training_access = DEBUG_LEVEL_ERROR;
Chris Packham1a07d212018-05-10 13:28:29 +120031u8 debug_training_device = DEBUG_LEVEL_ERROR;
32
Tony Dinhe2c524b2023-01-18 19:03:04 -080033#if defined(CONFIG_DDR4)
34u8 debug_tap_tuning = DEBUG_LEVEL_ERROR;
35u8 debug_calibration = DEBUG_LEVEL_ERROR;
36u8 debug_ddr4_centralization = DEBUG_LEVEL_ERROR;
37u8 debug_dm_tuning = DEBUG_LEVEL_ERROR;
38#endif /* CONFIG_DDR4 */
Chris Packham1a07d212018-05-10 13:28:29 +120039
40void mv_ddr_user_log_level_set(enum ddr_lib_debug_block block)
41{
42 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
43 ddr3_hws_set_log_level(block, tm->debug_level);
44};
Stefan Roese5ffceb82015-03-26 15:36:56 +010045
/*
 * Apply @level to the debug flag of the given @block.
 *
 * DEBUG_STAGES_REG_DUMP is special: it does not change a log level but
 * toggles register dumping, enabled only when @level is DEBUG_LEVEL_TRACE.
 * DEBUG_BLOCK_ALL (and any unknown block id) sets every flag at once.
 *
 * NOTE(review): debug_training_ip and debug_training_bist are never set
 * here, not even by DEBUG_BLOCK_ALL - confirm whether that is intended.
 */
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	switch (block) {
	case DEBUG_BLOCK_STATIC:
		debug_training_static = level;
		break;
	case DEBUG_BLOCK_TRAINING_MAIN:
		debug_training = level;
		break;
	case DEBUG_BLOCK_LEVELING:
		debug_leveling = level;
		break;
	case DEBUG_BLOCK_CENTRALIZATION:
		debug_centralization = level;
		break;
	case DEBUG_BLOCK_PBS:
		debug_pbs = level;
		break;
	case DEBUG_BLOCK_ALG:
		debug_training_hw_alg = level;
		break;
	case DEBUG_BLOCK_DEVICE:
		debug_training_device = level;
		break;
	case DEBUG_BLOCK_ACCESS:
		debug_training_access = level;
		break;
	case DEBUG_STAGES_REG_DUMP:
		/* register dump only at TRACE verbosity */
		if (level == DEBUG_LEVEL_TRACE)
			is_reg_dump = 1;
		else
			is_reg_dump = 0;
		break;
#if defined(CONFIG_DDR4)
	case DEBUG_TAP_TUNING_ENGINE:
		debug_tap_tuning = level;
		break;
	case DEBUG_BLOCK_CALIBRATION:
		debug_calibration = level;
		break;
	case DEBUG_BLOCK_DDR4_CENTRALIZATION:
		debug_ddr4_centralization = level;
		break;
#endif /* CONFIG_DDR4 */
	case DEBUG_BLOCK_ALL:
	default:
		/* broadcast the level to every module flag */
		debug_training_static = level;
		debug_training = level;
		debug_leveling = level;
		debug_centralization = level;
		debug_pbs = level;
		debug_training_hw_alg = level;
		debug_training_access = level;
		debug_training_device = level;
#if defined(CONFIG_DDR4)
		debug_tap_tuning = level;
		debug_calibration = level;
		debug_ddr4_centralization = level;
#endif /* CONFIG_DDR4 */
	}
}
Chris Packham1a07d212018-05-10 13:28:29 +1200107#endif /* SILENT_LIB */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100108
Chris Packham1a07d212018-05-10 13:28:29 +1200109#if defined(DDR_VIEWER_TOOL)
Chris Packham4bf81db2018-12-03 14:26:49 +1300110static char *convert_freq(enum mv_ddr_freq freq);
Chris Packham1a07d212018-05-10 13:28:29 +1200111#if defined(EXCLUDE_SWITCH_DEBUG)
112u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
113u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
114u32 ctrl_adll1[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
115u32 ctrl_level_phase[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
116#endif /* EXCLUDE_SWITCH_DEBUG */
117#endif /* DDR_VIEWER_TOOL */
118
/* per-device register-access callbacks (see ddr3_tip_init_config_func) */
struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
/* DDR viewer: enable per-interface / per-pup validate-window logging */
u8 is_validate_window_per_if = 0;
u8 is_validate_window_per_pup = 0;
u8 sweep_cnt = 1;	/* number of sweep iterations to run */
u8 is_run_leveling_sweep_tests;

/* per-device XSB info registered via ddr3_tip_register_xsb_info() */
static struct hws_xsb_info xsb_info[MAX_DEVICE_NUM];
Stefan Roese5ffceb82015-03-26 15:36:56 +0100126
/*
 * Dump Dunit & PHY registers of device @dev_num to the console.
 *
 * Dunit registers 0x1400..0x19ec are read per active interface; PHY
 * registers 0x00..0xff are read per active octet, first from the DATA
 * PHY section, then from the CONTROL PHY section.
 *
 * Returns MV_OK; note CHECK_STATUS() presumably returns early on a
 * failed register read (macro from ddr3_init.h - confirm).
 */
int ddr3_tip_reg_dump(u32 dev_num)
{
	u32 if_id, reg_addr, data_value, bus_id;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	printf("-- dunit registers --\n");
	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			/* skips (continues past) inactive interfaces */
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, reg_addr, read_data,
				      MASK_ALL_BITS));
			printf("0x%x ", read_data[if_id]);
		}
		printf("\n");
	}

	printf("-- Phy registers --\n");
	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			/* data PHY section */
			for (bus_id = 0;
			     bus_id < octets_per_if_num;
			     bus_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_DATA, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
			/* control PHY section */
			for (bus_id = 0;
			     bus_id < octets_per_if_num;
			     bus_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_CONTROL, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
		}
		printf("\n");
	}

	return MV_OK;
}
184
185/*
186 * Register access func registration
187 */
188int ddr3_tip_init_config_func(u32 dev_num,
189 struct hws_tip_config_func_db *config_func)
190{
191 if (config_func == NULL)
192 return MV_BAD_PARAM;
193
194 memcpy(&config_func_info[dev_num], config_func,
195 sizeof(struct hws_tip_config_func_db));
196
197 return MV_OK;
198}
199
200/*
Stefan Roese5ffceb82015-03-26 15:36:56 +0100201 * Get training result info pointer
202 */
203enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
204{
205 return training_result[stage];
206}
207
208/*
209 * Device info read
210 */
211int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
212{
213 if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
214 return config_func_info[dev_num].
215 tip_get_device_info_func((u8) dev_num, info_ptr);
216 }
217
218 return MV_FAIL;
219}
220
Chris Packham1a07d212018-05-10 13:28:29 +1200221#if defined(DDR_VIEWER_TOOL)
Stefan Roese5ffceb82015-03-26 15:36:56 +0100222/*
223 * Convert freq to character string
224 */
Chris Packham4bf81db2018-12-03 14:26:49 +1300225static char *convert_freq(enum mv_ddr_freq freq)
Stefan Roese5ffceb82015-03-26 15:36:56 +0100226{
227 switch (freq) {
Chris Packham4bf81db2018-12-03 14:26:49 +1300228 case MV_DDR_FREQ_LOW_FREQ:
229 return "MV_DDR_FREQ_LOW_FREQ";
Chris Packham1a07d212018-05-10 13:28:29 +1200230
Tony Dinhe2c524b2023-01-18 19:03:04 -0800231#if !defined(CONFIG_DDR4)
Chris Packham4bf81db2018-12-03 14:26:49 +1300232 case MV_DDR_FREQ_400:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100233 return "400";
234
Chris Packham4bf81db2018-12-03 14:26:49 +1300235 case MV_DDR_FREQ_533:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100236 return "533";
Tony Dinhe2c524b2023-01-18 19:03:04 -0800237#endif /* CONFIG_DDR4 */
Chris Packham1a07d212018-05-10 13:28:29 +1200238
Chris Packham4bf81db2018-12-03 14:26:49 +1300239 case MV_DDR_FREQ_667:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100240 return "667";
241
Chris Packham4bf81db2018-12-03 14:26:49 +1300242 case MV_DDR_FREQ_800:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100243 return "800";
244
Chris Packham4bf81db2018-12-03 14:26:49 +1300245 case MV_DDR_FREQ_933:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100246 return "933";
247
Chris Packham4bf81db2018-12-03 14:26:49 +1300248 case MV_DDR_FREQ_1066:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100249 return "1066";
Chris Packham1a07d212018-05-10 13:28:29 +1200250
Tony Dinhe2c524b2023-01-18 19:03:04 -0800251#if !defined(CONFIG_DDR4)
Chris Packham4bf81db2018-12-03 14:26:49 +1300252 case MV_DDR_FREQ_311:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100253 return "311";
254
Chris Packham4bf81db2018-12-03 14:26:49 +1300255 case MV_DDR_FREQ_333:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100256 return "333";
257
Chris Packham4bf81db2018-12-03 14:26:49 +1300258 case MV_DDR_FREQ_467:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100259 return "467";
260
Chris Packham4bf81db2018-12-03 14:26:49 +1300261 case MV_DDR_FREQ_850:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100262 return "850";
263
Chris Packham4bf81db2018-12-03 14:26:49 +1300264 case MV_DDR_FREQ_900:
Stefan Roese5ffceb82015-03-26 15:36:56 +0100265 return "900";
266
Chris Packham4bf81db2018-12-03 14:26:49 +1300267 case MV_DDR_FREQ_360:
268 return "MV_DDR_FREQ_360";
Stefan Roese5ffceb82015-03-26 15:36:56 +0100269
Chris Packham4bf81db2018-12-03 14:26:49 +1300270 case MV_DDR_FREQ_1000:
271 return "MV_DDR_FREQ_1000";
Tony Dinhe2c524b2023-01-18 19:03:04 -0800272#endif /* CONFIG_DDR4 */
Chris Packham1a07d212018-05-10 13:28:29 +1200273
Stefan Roese5ffceb82015-03-26 15:36:56 +0100274 default:
275 return "Unknown Frequency";
276 }
277}
278
279/*
280 * Convert device ID to character string
281 */
282static char *convert_dev_id(u32 dev_id)
283{
284 switch (dev_id) {
285 case 0x6800:
286 return "A38xx";
287 case 0x6900:
288 return "A39XX";
289 case 0xf400:
290 return "AC3";
291 case 0xfc00:
292 return "BC2";
293
294 default:
295 return "Unknown Device";
296 }
297}
298
299/*
300 * Convert device ID to character string
301 */
302static char *convert_mem_size(u32 dev_id)
303{
304 switch (dev_id) {
305 case 0:
306 return "512 MB";
307 case 1:
308 return "1 GB";
309 case 2:
310 return "2 GB";
311 case 3:
312 return "4 GB";
313 case 4:
314 return "8 GB";
315
316 default:
317 return "wrong mem size";
318 }
319}
320
/*
 * Print a DDR setup summary: device ID, CK delay and board topology.
 * CHECK_STATUS() presumably returns early if the device info read
 * fails (macro from ddr3_init.h - confirm).
 */
int print_device_info(u8 dev_num)
{
	struct ddr3_device_info info_ptr;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
	printf("=== DDR setup START===\n");
	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
	printf("\tDDR3 CK delay: %d\n", info_ptr.ck_delay);
	print_topology(tm);
	printf("=== DDR setup END===\n");

	return MV_OK;
}
335
336void hws_ddr3_tip_sweep_test(int enable)
337{
338 if (enable) {
339 is_validate_window_per_if = 1;
340 is_validate_window_per_pup = 1;
341 debug_training = DEBUG_LEVEL_TRACE;
342 } else {
343 is_validate_window_per_if = 0;
344 is_validate_window_per_pup = 0;
345 }
346}
Chris Packham1a07d212018-05-10 13:28:29 +1200347#endif /* DDR_VIEWER_TOOL */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100348
349char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
350{
351 switch (tune_result) {
352 case TEST_FAILED:
353 return "FAILED";
354 case TEST_SUCCESS:
355 return "PASS";
356 case NO_TEST_DONE:
357 return "NOT COMPLETED";
358 default:
359 return "Un-KNOWN";
360 }
361}
362
/*
 * Print the per-interface result of every executed training stage.
 *
 * With DDR_VIEWER_TOOL, first runs and prints the validate-window
 * sweep tests when they were enabled via hws_ddr3_tip_sweep_test().
 * Returns early (MV_OK) when nothing would be printed anyway
 * (SILENT_LIB build, or debug_training below INFO).
 *
 * @mem_addr is unused in this implementation.
 */
int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
{
	u32 if_id = 0;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

#if defined(DDR_VIEWER_TOOL)
	if ((is_validate_window_per_if != 0) ||
	    (is_validate_window_per_pup != 0)) {
		u32 is_pup_log = 0;
		enum mv_ddr_freq freq;

		freq = tm->interface_params[first_active_if].memory_freq;

		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
		printf("===VALIDATE WINDOW LOG START===\n");
		printf("DDR Frequency: %s ======\n", convert_freq(freq));
		/* print sweep windows */
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
#if defined(EXCLUDE_SWITCH_DEBUG)
		if (is_run_leveling_sweep_tests == 1) {
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		}
#endif /* EXCLUDE_SWITCH_DEBUG */
		ddr3_tip_print_all_pbs_result(dev_num);
		ddr3_tip_print_wl_supp_result(dev_num);
		printf("===VALIDATE WINDOW LOG END ===\n");
		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
		ddr3_tip_reg_dump(dev_num);
	}
#endif /* DDR_VIEWER_TOOL */

	/* return early if we won't print anything anyway */
	if (
#if defined(SILENT_LIB)
	    1 ||
#endif
	    debug_training < DEBUG_LEVEL_INFO) {
		return MV_OK;
	}

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("IF %d Status:\n", if_id));

		/* one line per stage that was enabled in mask_tune_func */
		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tInit Controller: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[INIT_CONTROLLER]
					    [if_id])));
		}
		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLow freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_LOW_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN]
					    [if_id])));
		}
		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tMedium freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_MEDIUM_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN_2]
					    [if_id])));
		}
		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_SUPP]
					    [if_id])));
		}
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_RX]
					    [if_id])));
		}
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_TX]
					    [if_id])));
		}
		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tTarget freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_TARGET_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_TF]
					    [if_id])));
		}
#if !defined(CONFIG_DDR4)
		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING_TF]
					    [if_id])));
		}
#endif /* !CONFIG_DDR4 */
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result
					    [WRITE_LEVELING_SUPP_TF]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_RX]
					    [if_id])));
		}
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tVREF_CALIBRATION: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_TX]
					    [if_id])));
		}
#if defined(CONFIG_DDR4)
		if (mask_tune_func & SW_READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tSW RL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SW_READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & RECEIVER_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRX CAL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[RECEIVER_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & WL_PHASE_CORRECTION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL PHASE CORRECT: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WL_PHASE_CORRECTION]
					    [if_id])));
		}
		if (mask_tune_func & DQ_VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tDQ VREF CAL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[DQ_VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & DQ_MAPPING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tDQ MAP: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[DQ_MAPPING]
					    [if_id])));
		}
#endif /* CONFIG_DDR4 */
	}

	return MV_OK;
}
577
Chris Packham1a07d212018-05-10 13:28:29 +1200578#if !defined(EXCLUDE_DEBUG_PRINTS)
Stefan Roese5ffceb82015-03-26 15:36:56 +0100579/*
580 * Print stability log info
581 */
582int ddr3_tip_print_stability_log(u32 dev_num)
583{
584 u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
585 u32 reg_data;
Tony Dinhe2c524b2023-01-18 19:03:04 -0800586#if defined(CONFIG_DDR4)
587 u32 reg_data1;
588#endif /* CONFIG_DDR4 */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100589 u32 read_data[MAX_INTERFACE_NUM];
Chris Packham4bf81db2018-12-03 14:26:49 +1300590 unsigned int max_cs = mv_ddr_cs_num_get();
Chris Packham1a07d212018-05-10 13:28:29 +1200591 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
Stefan Roese5ffceb82015-03-26 15:36:56 +0100592
593 /* Title print */
594 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +1200595 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100596 printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
597 for (csindex = 0; csindex < max_cs; csindex++) {
598 printf("CS%d , ", csindex);
599 printf("\n");
Chris Packham1a07d212018-05-10 13:28:29 +1200600 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
Tony Dinhe2c524b2023-01-18 19:03:04 -0800601#if defined(CONFIG_DDR4)
602 printf("DminTx, AreaTx, DminRx, AreaRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, CenTx, CenRx, Vref, DQVref,");
603 for (idx = 0; idx < 11; idx++)
604 printf("DC-Pad%d,", idx);
605#else /* CONFIG_DDR4 */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100606 printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
Tony Dinhe2c524b2023-01-18 19:03:04 -0800607#endif /* CONFIG_DDR4 */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100608 printf("\t\t");
609 for (idx = 0; idx < 11; idx++)
610 printf("PBSTx-Pad%d,", idx);
611 printf("\t\t");
612 for (idx = 0; idx < 11; idx++)
613 printf("PBSRx-Pad%d,", idx);
614 }
615 }
616 printf("\n");
617
618 /* Data print */
619 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +1200620 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100621
622 printf("Data: %d,%d,", if_id,
623 (config_func_info[dev_num].tip_get_temperature != NULL)
624 ? (config_func_info[dev_num].
625 tip_get_temperature(dev_num)) : (0));
626
627 CHECK_STATUS(ddr3_tip_if_read
628 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
629 read_data, MASK_ALL_BITS));
630 printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
631 ((read_data[if_id] & 0xfc00) >> 10));
632 CHECK_STATUS(ddr3_tip_if_read
633 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
634 read_data, MASK_ALL_BITS));
635 printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
636 ((read_data[if_id] & 0xfc00) >> 10));
637 CHECK_STATUS(ddr3_tip_if_read
638 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
639 read_data, MASK_ALL_BITS));
640 printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
641 ((read_data[if_id] & 0xfc00000) >> 22));
642
643 for (csindex = 0; csindex < max_cs; csindex++) {
644 printf("CS%d , ", csindex);
645 for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
646 printf("\n");
Chris Packham1a07d212018-05-10 13:28:29 +1200647 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
Tony Dinhe2c524b2023-01-18 19:03:04 -0800648#if defined(CONFIG_DDR4)
649 /* DminTx, areaTX */
650 ddr3_tip_bus_read(dev_num, if_id,
651 ACCESS_TYPE_UNICAST,
652 bus_id, DDR_PHY_DATA,
653 RESULT_PHY_REG +
654 csindex, &reg_data);
655 ddr3_tip_bus_read(dev_num, if_id,
656 ACCESS_TYPE_UNICAST,
657 dmin_phy_reg_table
658 [csindex * 5 + bus_id][0],
659 DDR_PHY_CONTROL,
660 dmin_phy_reg_table
661 [csindex * 5 + bus_id][1],
662 &reg_data1);
663 printf("%d,%d,", 2 * (reg_data1 & 0xFF),
664 reg_data);
665 /* DminRx, areaRX */
666 ddr3_tip_bus_read(dev_num, if_id,
667 ACCESS_TYPE_UNICAST,
668 bus_id, DDR_PHY_DATA,
669 RESULT_PHY_REG +
670 csindex + 4, &reg_data);
671 ddr3_tip_bus_read(dev_num, if_id,
672 ACCESS_TYPE_UNICAST,
673 dmin_phy_reg_table
674 [csindex * 5 + bus_id][0],
675 DDR_PHY_CONTROL,
676 dmin_phy_reg_table
677 [csindex * 5 + bus_id][1],
678 &reg_data1);
679 printf("%d,%d,", 2 * (reg_data1 >> 8),
680 reg_data);
681#else /* CONFIG_DDR4 */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100682 ddr3_tip_bus_read(dev_num, if_id,
683 ACCESS_TYPE_UNICAST,
684 bus_id, DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200685 RESULT_PHY_REG +
Stefan Roese5ffceb82015-03-26 15:36:56 +0100686 csindex, &reg_data);
687 printf("%d,%d,", (reg_data & 0x1f),
688 ((reg_data & 0x3e0) >> 5));
Tony Dinhe2c524b2023-01-18 19:03:04 -0800689#endif /* CONFIG_DDR4 */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100690 /* WL */
691 ddr3_tip_bus_read(dev_num, if_id,
692 ACCESS_TYPE_UNICAST,
693 bus_id, DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200694 WL_PHY_REG(csindex),
695 &reg_data);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100696 printf("%d,%d,%d,",
697 (reg_data & 0x1f) +
698 ((reg_data & 0x1c0) >> 6) * 32,
699 (reg_data & 0x1f),
700 (reg_data & 0x1c0) >> 6);
701 /* RL */
702 CHECK_STATUS(ddr3_tip_if_read
703 (dev_num, ACCESS_TYPE_UNICAST,
704 if_id,
Chris Packham1a07d212018-05-10 13:28:29 +1200705 RD_DATA_SMPL_DLYS_REG,
Stefan Roese5ffceb82015-03-26 15:36:56 +0100706 read_data, MASK_ALL_BITS));
707 read_data[if_id] =
708 (read_data[if_id] &
Chris Packham1a07d212018-05-10 13:28:29 +1200709 (0x1f << (8 * csindex))) >>
710 (8 * csindex);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100711 ddr3_tip_bus_read(dev_num, if_id,
712 ACCESS_TYPE_UNICAST, bus_id,
713 DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200714 RL_PHY_REG(csindex),
Stefan Roese5ffceb82015-03-26 15:36:56 +0100715 &reg_data);
716 printf("%d,%d,%d,%d,",
717 (reg_data & 0x1f) +
718 ((reg_data & 0x1c0) >> 6) * 32 +
719 read_data[if_id] * 64,
720 (reg_data & 0x1f),
721 ((reg_data & 0x1c0) >> 6),
722 read_data[if_id]);
723 /* Centralization */
724 ddr3_tip_bus_read(dev_num, if_id,
725 ACCESS_TYPE_UNICAST, bus_id,
726 DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200727 CTX_PHY_REG(csindex),
728 &reg_data);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100729 printf("%d,", (reg_data & 0x3f));
730 ddr3_tip_bus_read(dev_num, if_id,
731 ACCESS_TYPE_UNICAST, bus_id,
732 DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200733 CRX_PHY_REG(csindex),
734 &reg_data);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100735 printf("%d,", (reg_data & 0x1f));
736 /* Vref */
737 ddr3_tip_bus_read(dev_num, if_id,
738 ACCESS_TYPE_UNICAST, bus_id,
739 DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200740 PAD_CFG_PHY_REG,
Stefan Roese5ffceb82015-03-26 15:36:56 +0100741 &reg_data);
742 printf("%d,", (reg_data & 0x7));
743 /* DQVref */
744 /* Need to add the Read Function from device */
745 printf("%d,", 0);
Tony Dinhe2c524b2023-01-18 19:03:04 -0800746#if defined(CONFIG_DDR4)
747 printf("\t\t");
748 for (idx = 0; idx < 11; idx++) {
749 ddr3_tip_bus_read(dev_num, if_id,
750 ACCESS_TYPE_UNICAST,
751 bus_id, DDR_PHY_DATA,
752 0xd0 + 12 * csindex +
753 idx, &reg_data);
754 printf("%d,", (reg_data & 0x3f));
755 }
756#endif /* CONFIG_DDR4 */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100757 printf("\t\t");
758 for (idx = 0; idx < 11; idx++) {
759 ddr3_tip_bus_read(dev_num, if_id,
760 ACCESS_TYPE_UNICAST,
761 bus_id, DDR_PHY_DATA,
Stefan Roese5ffceb82015-03-26 15:36:56 +0100762 0x10 +
763 16 * csindex +
764 idx, &reg_data);
765 printf("%d,", (reg_data & 0x3f));
766 }
767 printf("\t\t");
768 for (idx = 0; idx < 11; idx++) {
769 ddr3_tip_bus_read(dev_num, if_id,
770 ACCESS_TYPE_UNICAST,
771 bus_id, DDR_PHY_DATA,
772 0x50 +
773 16 * csindex +
774 idx, &reg_data);
775 printf("%d,", (reg_data & 0x3f));
776 }
777 }
778 }
779 }
780 printf("\n");
781
782 return MV_OK;
783}
Chris Packham1a07d212018-05-10 13:28:29 +1200784#endif /* EXCLUDE_DEBUG_PRINTS */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100785
786/*
787 * Register XSB information
788 */
789int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
790{
791 memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
792 return MV_OK;
793}
794
/*
 * Read ADLL Value
 *
 * Read PHY data register @reg_addr from every active octet on every
 * active interface, mask it with @mask and store it in @pup_values
 * at index if_id * octets-per-interface + bus_id.
 */
int ddr3_tip_read_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			     u32 reg_addr, u32 mask)
{
	u32 data_value;
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 * with CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       ACCESS_TYPE_UNICAST,
						       bus_id,
						       DDR_PHY_DATA, reg_addr,
						       &data_value));
			pup_values[if_id *
				   octets_per_if_num + bus_id] =
				data_value & mask;
		}
	}

	return 0;
}
828
/*
 * Write ADLL Value
 *
 * Write PHY data register @reg_addr of every active octet on every
 * active interface with the value taken from @pup_values at index
 * if_id * octets-per-interface + bus_id (inverse of the read above).
 */
int ddr3_tip_write_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			      u32 reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 data;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 * with CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id *
					  octets_per_if_num +
					  bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id, DDR_PHY_DATA,
							reg_addr, data));
		}
	}

	return 0;
}
863
Chris Packham1a07d212018-05-10 13:28:29 +1200864/**
865 * Read Phase Value
866 */
867int read_phase_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
868 int reg_addr, u32 mask)
869{
870 u32 data_value;
871 u32 if_id = 0, bus_id = 0;
872 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
873 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
874
875 /* multi CS support - reg_addr is calucalated in calling function with CS offset */
876 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
877 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
878 for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
879 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
880 CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
881 ACCESS_TYPE_UNICAST,
882 bus_id,
883 DDR_PHY_DATA, reg_addr,
884 &data_value));
885 pup_values[if_id * octets_per_if_num + bus_id] = data_value & mask;
886 }
887 }
888
889 return 0;
890}
891
892/**
893 * Write Leveling Value
894 */
895int write_leveling_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
896 u32 pup_ph_values[MAX_INTERFACE_NUM * MAX_BUS_NUM], int reg_addr)
897{
898 u32 if_id = 0, bus_id = 0;
899 u32 data;
900 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
901 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
902
903 /* multi CS support - reg_addr is calucalated in calling function with CS offset */
904 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
905 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
906 for (bus_id = 0 ; bus_id < octets_per_if_num ; bus_id++) {
907 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
908 data = pup_values[if_id * octets_per_if_num + bus_id] +
909 pup_ph_values[if_id * octets_per_if_num + bus_id];
910 CHECK_STATUS(ddr3_tip_bus_write(dev_num,
911 ACCESS_TYPE_UNICAST,
912 if_id,
913 ACCESS_TYPE_UNICAST,
914 bus_id,
915 DDR_PHY_DATA,
916 reg_addr,
917 data));
918 }
919 }
920
921 return 0;
922}
923
#if !defined(EXCLUDE_SWITCH_DEBUG)
/* per-device hook table used by the switch/debug interface */
struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
u32 start_xsb_offset = 0;
u8 is_rl_old = 0;
u8 is_freq_old = 0;
u8 is_dfs_disabled = 0;
/* default centralization tap (identifier spelling kept - external name) */
u32 default_centrlization_value = 0x12;
u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
	rl_test = 0, reset_read_fifo = 0;
int debug_acc = 0;
/* per-ADLL-step BIST error accumulators filled by the sweep tests below */
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
/* ADLL values saved before a sweep and restored when it completes */
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];

/* 8-word data bursts written and expected back by run_xsb_test() */
u32 xsb_test_table[][8] = {
	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
	 0x66666666, 0x77777777},
	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
	 0xeeeeeeee, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
	 0xffffffff, 0xffffffff},
	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
	 0x00000000, 0x00000000},
	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
	 0xffffffff, 0xffffffff}
};
957
Stefan Roese5ffceb82015-03-26 15:36:56 +0100958int ddr3_tip_print_adll(void)
959{
960 u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
Chris Packham1a07d212018-05-10 13:28:29 +1200961 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
962 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
Stefan Roese5ffceb82015-03-26 15:36:56 +0100963
964 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +1200965 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
966 for (bus_cnt = 0; bus_cnt < octets_per_if_num;
Stefan Roese5ffceb82015-03-26 15:36:56 +0100967 bus_cnt++) {
Chris Packham1a07d212018-05-10 13:28:29 +1200968 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100969 CHECK_STATUS(ddr3_tip_bus_read
970 (dev_num, if_id,
971 ACCESS_TYPE_UNICAST, bus_cnt,
972 DDR_PHY_DATA, 0x1, &data_p1));
973 CHECK_STATUS(ddr3_tip_bus_read
974 (dev_num, if_id, ACCESS_TYPE_UNICAST,
975 bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
976 CHECK_STATUS(ddr3_tip_bus_read
977 (dev_num, if_id, ACCESS_TYPE_UNICAST,
978 bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
979 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
980 (" IF %d bus_cnt %d phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
981 if_id, bus_cnt, data_p1, data_p2,
982 ui_data3));
983 }
984 }
985
986 return MV_OK;
987}
988
Chris Packham1a07d212018-05-10 13:28:29 +1200989#endif /* EXCLUDE_SWITCH_DEBUG */
990
991#if defined(DDR_VIEWER_TOOL)
Stefan Roese5ffceb82015-03-26 15:36:56 +0100992/*
993 * Print ADLL
994 */
995int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
996{
997 u32 i, j;
Chris Packham1a07d212018-05-10 13:28:29 +1200998 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
999 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
Stefan Roese5ffceb82015-03-26 15:36:56 +01001000
Chris Packham1a07d212018-05-10 13:28:29 +12001001 for (j = 0; j < octets_per_if_num; j++) {
1002 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
1003 for (i = 0; i < MAX_INTERFACE_NUM; i++)
1004 printf("%d ,", adll[i * octets_per_if_num + j]);
Stefan Roese5ffceb82015-03-26 15:36:56 +01001005 }
1006 printf("\n");
1007
1008 return MV_OK;
1009}
Stefan Roese5ffceb82015-03-26 15:36:56 +01001010
Chris Packham1a07d212018-05-10 13:28:29 +12001011int print_ph(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
1012{
1013 u32 i, j;
1014 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1015 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1016
1017 for (j = 0; j < octets_per_if_num; j++) {
1018 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
1019 for (i = 0; i < MAX_INTERFACE_NUM; i++)
1020 printf("%d ,", adll[i * octets_per_if_num + j] >> 6);
1021 }
1022 printf("\n");
1023
1024 return MV_OK;
1025}
1026#endif /* DDR_VIEWER_TOOL */
1027
1028#if !defined(EXCLUDE_SWITCH_DEBUG)
Stefan Roese5ffceb82015-03-26 15:36:56 +01001029/* byte_index - only byte 0, 1, 2, or 3, oxff - test all bytes */
1030static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
1031 u32 byte_index)
1032{
1033 u32 burst_cnt = 0, addr_offset, i_id;
1034 int b_is_fail = 0;
1035
1036 addr_offset =
1037 (byte_index ==
1038 0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
1039 for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
1040 if ((p_src[burst_cnt] & addr_offset) !=
Chris Packham1a07d212018-05-10 13:28:29 +12001041 (p_dst[if_id] & addr_offset))
Stefan Roese5ffceb82015-03-26 15:36:56 +01001042 b_is_fail = 1;
1043 }
1044
1045 if (b_is_fail == 1) {
1046 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1047 ("IF %d exp: ", if_id));
1048 for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
1049 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1050 ("0x%8x ", p_src[i_id]));
1051 }
1052 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1053 ("\n_i_f %d rcv: ", if_id));
1054 for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
1055 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1056 ("(0x%8x ", p_dst[i_id]));
1057 }
1058 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
1059 }
1060
1061 return b_is_fail;
1062}
Chris Packham1a07d212018-05-10 13:28:29 +12001063#endif /* EXCLUDE_SWITCH_DEBUG */
Stefan Roese5ffceb82015-03-26 15:36:56 +01001064
Chris Packham1a07d212018-05-10 13:28:29 +12001065#if defined(DDR_VIEWER_TOOL)
Stefan Roese5ffceb82015-03-26 15:36:56 +01001066/*
1067 * Sweep validation
1068 */
1069int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
1070 u32 mode)
1071{
1072 u32 pup = 0, start_pup = 0, end_pup = 0;
Chris Packham1a07d212018-05-10 13:28:29 +12001073 u32 adll = 0, rep = 0, pattern_idx = 0;
Stefan Roese5ffceb82015-03-26 15:36:56 +01001074 u32 res[MAX_INTERFACE_NUM] = { 0 };
1075 int if_id = 0;
1076 u32 adll_value = 0;
Chris Packham1a07d212018-05-10 13:28:29 +12001077 u32 reg;
Stefan Roese5ffceb82015-03-26 15:36:56 +01001078 enum hws_access_type pup_access;
1079 u32 cs;
Chris Packham4bf81db2018-12-03 14:26:49 +13001080 unsigned int max_cs = mv_ddr_cs_num_get();
Chris Packham1a07d212018-05-10 13:28:29 +12001081 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
1082 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
1083
1084 repeat_num = 2;
Stefan Roese5ffceb82015-03-26 15:36:56 +01001085
Stefan Roese5ffceb82015-03-26 15:36:56 +01001086 if (mode == 1) {
1087 /* per pup */
1088 start_pup = 0;
Chris Packham1a07d212018-05-10 13:28:29 +12001089 end_pup = octets_per_if_num - 1;
Stefan Roese5ffceb82015-03-26 15:36:56 +01001090 pup_access = ACCESS_TYPE_UNICAST;
1091 } else {
1092 start_pup = 0;
1093 end_pup = 0;
1094 pup_access = ACCESS_TYPE_MULTICAST;
1095 }
1096
1097 for (cs = 0; cs < max_cs; cs++) {
Chris Packham1a07d212018-05-10 13:28:29 +12001098 reg = (direction == 0) ? CTX_PHY_REG(cs) : CRX_PHY_REG(cs);
Stefan Roese5ffceb82015-03-26 15:36:56 +01001099 for (adll = 0; adll < ADLL_LENGTH; adll++) {
1100 for (if_id = 0;
1101 if_id <= MAX_INTERFACE_NUM - 1;
1102 if_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +12001103 VALIDATE_IF_ACTIVE
Stefan Roese5ffceb82015-03-26 15:36:56 +01001104 (tm->if_act_mask,
1105 if_id);
1106 for (pup = start_pup; pup <= end_pup; pup++) {
1107 ctrl_sweepres[adll][if_id][pup] =
1108 0;
1109 }
1110 }
1111 }
1112
1113 for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
1114 ctrl_adll[adll] = 0;
Chris Packham1a07d212018-05-10 13:28:29 +12001115 /* Save DQS value(after algorithm run) */
1116 ddr3_tip_read_adll_value(dev_num, ctrl_adll,
1117 reg, MASK_ALL_BITS);
Stefan Roese5ffceb82015-03-26 15:36:56 +01001118
1119 /*
1120 * Sweep ADLL from 0:31 on all I/F on all Pup and perform
1121 * BIST on each stage.
1122 */
1123 for (pup = start_pup; pup <= end_pup; pup++) {
1124 for (adll = 0; adll < ADLL_LENGTH; adll++) {
Chris Packham1a07d212018-05-10 13:28:29 +12001125 for (rep = 0; rep < repeat_num; rep++) {
1126 for (pattern_idx = PATTERN_KILLER_DQ0;
1127 pattern_idx < PATTERN_LAST;
1128 pattern_idx++) {
1129 adll_value =
1130 (direction == 0) ? (adll * 2) : adll;
1131 CHECK_STATUS(ddr3_tip_bus_write
1132 (dev_num, ACCESS_TYPE_MULTICAST, 0,
1133 pup_access, pup, DDR_PHY_DATA,
1134 reg, adll_value));
1135 hws_ddr3_run_bist(dev_num, sweep_pattern, res,
1136 cs);
1137 /* ddr3_tip_reset_fifo_ptr(dev_num); */
1138 for (if_id = 0;
1139 if_id < MAX_INTERFACE_NUM;
1140 if_id++) {
1141 VALIDATE_IF_ACTIVE
1142 (tm->if_act_mask,
1143 if_id);
1144 ctrl_sweepres[adll][if_id][pup]
1145 += res[if_id];
1146 if (mode == 1) {
1147 CHECK_STATUS
1148 (ddr3_tip_bus_write
1149 (dev_num,
1150 ACCESS_TYPE_UNICAST,
1151 if_id,
1152 ACCESS_TYPE_UNICAST,
1153 pup,
1154 DDR_PHY_DATA,
1155 reg,
1156 ctrl_adll[if_id *
1157 cs *
1158 octets_per_if_num
1159 + pup]));
1160 }
1161 }
Stefan Roese5ffceb82015-03-26 15:36:56 +01001162 }
1163 }
1164 }
1165 }
1166 printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
1167 ((direction == 0) ? "TX" : "RX"));
1168 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +12001169 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
Stefan Roese5ffceb82015-03-26 15:36:56 +01001170 if (mode == 1) {
1171 for (pup = start_pup; pup <= end_pup; pup++) {
Chris Packham1a07d212018-05-10 13:28:29 +12001172 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
Stefan Roese5ffceb82015-03-26 15:36:56 +01001173 printf("I/F%d-PHY%d , ", if_id, pup);
1174 }
1175 } else {
1176 printf("I/F%d , ", if_id);
1177 }
1178 }
1179 printf("\n");
1180
1181 for (adll = 0; adll < ADLL_LENGTH; adll++) {
1182 adll_value = (direction == 0) ? (adll * 2) : adll;
1183 printf("Final,%s, Sweep, Result, %d ,",
1184 ((direction == 0) ? "TX" : "RX"), adll_value);
1185
1186 for (if_id = 0;
1187 if_id <= MAX_INTERFACE_NUM - 1;
1188 if_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +12001189 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
Stefan Roese5ffceb82015-03-26 15:36:56 +01001190 for (pup = start_pup; pup <= end_pup; pup++) {
Chris Packham1a07d212018-05-10 13:28:29 +12001191 printf("%8d , ",
Stefan Roese5ffceb82015-03-26 15:36:56 +01001192 ctrl_sweepres[adll][if_id]
1193 [pup]);
1194 }
1195 }
1196 printf("\n");
1197 }
1198
1199 /*
1200 * Write back to the phy the Rx DQS value, we store in
1201 * the beginning.
1202 */
Chris Packham1a07d212018-05-10 13:28:29 +12001203 ddr3_tip_write_adll_value(dev_num, ctrl_adll, reg);
Stefan Roese5ffceb82015-03-26 15:36:56 +01001204 /* print adll results */
Chris Packham1a07d212018-05-10 13:28:29 +12001205 ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
Stefan Roese5ffceb82015-03-26 15:36:56 +01001206 printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
1207 print_adll(dev_num, ctrl_adll);
Chris Packham1a07d212018-05-10 13:28:29 +12001208 }
1209 ddr3_tip_reset_fifo_ptr(dev_num);
1210
1211 return 0;
1212}
1213
1214#if defined(EXCLUDE_SWITCH_DEBUG)
/*
 * Leveling sweep
 *
 * Companion to ddr3_tip_run_sweep_test(): sweeps the write- (WL) or
 * read- (RL) leveling register around each octet's trained value,
 * running BIST with every killer pattern at each step and accumulating
 * per-interface results in ctrl_sweepres[]. The trained leveling ADLL,
 * its phase field and (for TX) the centralization (CTX) values are
 * saved first and restored when the sweep completes; results are
 * printed as CSV rows for the DDR viewer tool.
 *
 * direction - 0: sweep write leveling (WL), otherwise read leveling (RL)
 * mode      - 1: sweep each PHY (pup) individually, otherwise multicast
 */
int ddr3_tip_run_leveling_sweep_test(int dev_num, u32 repeat_num,
				     u32 direction, u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0, start_adll = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int if_id = 0, gap = 0;
	u32 adll_value = 0;
	u32 reg;
	enum hws_access_type pup_access;
	u32 cs;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (mode == 1) { /* per pup */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
		/* leveling register of this chip-select */
		reg = (direction == 0) ? WL_PHY_REG(cs) : RL_PHY_REG(cs);
		/* clear the per-step accumulators */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					ctrl_sweepres[adll][if_id][pup] = 0;
			}
		}

		for (adll = 0; adll < MAX_INTERFACE_NUM * MAX_BUS_NUM; adll++) {
			ctrl_adll[adll] = 0;
			ctrl_level_phase[adll] = 0;
			ctrl_adll1[adll] = 0;
		}

		/* save leveling value after running algorithm */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, 0x1f);
		read_phase_value(dev_num, ctrl_level_phase, reg, 0x7 << 6);

		/* TX sweep also disturbs centralization - save it too */
		if (direction == 0)
			ddr3_tip_read_adll_value(dev_num, ctrl_adll1,
						 CTX_PHY_REG(cs), MASK_ALL_BITS);

		/* Sweep ADLL from 0 to 31 on all interfaces, all pups,
		 * and perform BIST on each stage
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					adll_value = (direction == 0) ? (adll * 2) : (adll * 3);
					for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
						/*
						 * sweep window starts below the trained value
						 * (phase contributes 32 taps per step).
						 * NOTE(review): index multiplies if_id by cs,
						 * which is 0 for the first chip-select -
						 * confirm the intended layout.
						 */
						start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
							     (ctrl_level_phase[if_id * cs *
									       octets_per_if_num +
									       pup] >> 6) * 32;

						if (direction == 0)
							start_adll = (start_adll > 32) ? (start_adll - 32) : 0;
						else
							start_adll = (start_adll > 48) ? (start_adll - 48) : 0;

						adll_value += start_adll;

						/* keep centralization tracking the WL shift */
						gap = ctrl_adll1[if_id * cs * octets_per_if_num + pup] -
						      ctrl_adll[if_id * cs * octets_per_if_num + pup];
						gap = (((adll_value % 32) + gap) % 64);

						/* re-encode: ADLL in the low 5 bits, phase from bit 6 up */
						adll_value = ((adll_value % 32) +
							      (((adll_value - (adll_value % 32)) / 32) << 6));

						CHECK_STATUS(ddr3_tip_bus_write(dev_num,
										ACCESS_TYPE_UNICAST,
										if_id,
										pup_access,
										pup,
										DDR_PHY_DATA,
										reg,
										adll_value));
						if (direction == 0)
							CHECK_STATUS(ddr3_tip_bus_write(dev_num,
											ACCESS_TYPE_UNICAST,
											if_id,
											pup_access,
											pup,
											DDR_PHY_DATA,
											CTX_PHY_REG(cs),
											gap));
					}

					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
					     pattern_idx++) {
						hws_ddr3_run_bist(dev_num, sweep_pattern, res, cs);
						ddr3_tip_reset_fifo_ptr(dev_num);
						for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
							VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
							if (pup != 4) {	/* TODO: remove literal */
								ctrl_sweepres[adll][if_id][pup] += res[if_id];
							} else {
								/* pup 4 (ECC): read/clear error counters directly */
								CHECK_STATUS(ddr3_tip_if_read(dev_num,
											      ACCESS_TYPE_UNICAST,
											      if_id,
											      0x1458,
											      read_data,
											      MASK_ALL_BITS));
								ctrl_sweepres[adll][if_id][pup] += read_data[if_id];
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
											       if_id,
											       0x1458,
											       0x0,
											       0xFFFFFFFF));
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
											       if_id,
											       0x145C,
											       0x0,
											       0xFFFFFFFF));
							}
						}
					}
				}
			}

			/* restore this pup's trained values before sweeping the next one */
			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
					     ctrl_level_phase[if_id * cs * octets_per_if_num + pup];
				CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, pup_access, pup,
								DDR_PHY_DATA, reg, start_adll));
				if (direction == 0)
					CHECK_STATUS(ddr3_tip_bus_write(dev_num,
									ACCESS_TYPE_UNICAST,
									if_id,
									pup_access,
									pup,
									DDR_PHY_DATA,
									CTX_PHY_REG(cs),
									ctrl_adll1[if_id *
										   cs *
										   octets_per_if_num +
										   pup]));
			}
		}

		/* CSV header row */
		printf("Final,CS %d,%s,Leveling,Result,Adll,", cs, ((direction == 0) ? "TX" : "RX"));

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		/* CSV data rows: step offset relative to the trained value */
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? ((adll * 2) - 32) : ((adll * 3) - 48);
			printf("Final,%s,LevelingSweep,Result, %d ,", ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					printf("%8d , ", ctrl_sweepres[adll][if_id][pup]);
			}
			printf("\n");
		}

		/* write back to the phy the Rx DQS value, we store in the beginning */
		write_leveling_value(dev_num, ctrl_adll, ctrl_level_phase, reg);
		if (direction == 0)
			ddr3_tip_write_adll_value(dev_num, ctrl_adll1, CTX_PHY_REG(cs));

		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s,DQS,Leveling,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
		print_ph(dev_num, ctrl_level_phase);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}
Chris Packham1a07d212018-05-10 13:28:29 +12001408#endif /* EXCLUDE_SWITCH_DEBUG */
Stefan Roese5ffceb82015-03-26 15:36:56 +01001409
Chris Packham1a07d212018-05-10 13:28:29 +12001410void print_topology(struct mv_ddr_topology_map *topology_db)
Stefan Roese5ffceb82015-03-26 15:36:56 +01001411{
1412 u32 ui, uj;
Chris Packham1a07d212018-05-10 13:28:29 +12001413 u32 dev_num = 0;
Stefan Roese5ffceb82015-03-26 15:36:56 +01001414
1415 printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
Chris Packham1a07d212018-05-10 13:28:29 +12001416 printf("\tNumber of buses: 0x%x\n",
1417 ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE));
Stefan Roese5ffceb82015-03-26 15:36:56 +01001418 printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);
1419
1420 for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
Chris Packham1a07d212018-05-10 13:28:29 +12001421 VALIDATE_IF_ACTIVE(topology_db->if_act_mask, ui);
Stefan Roese5ffceb82015-03-26 15:36:56 +01001422 printf("\n\tInterface ID: %d\n", ui);
1423 printf("\t\tDDR Frequency: %s\n",
1424 convert_freq(topology_db->
1425 interface_params[ui].memory_freq));
1426 printf("\t\tSpeed_bin: %d\n",
1427 topology_db->interface_params[ui].speed_bin_index);
1428 printf("\t\tBus_width: %d\n",
1429 (4 << topology_db->interface_params[ui].bus_width));
1430 printf("\t\tMem_size: %s\n",
1431 convert_mem_size(topology_db->
1432 interface_params[ui].memory_size));
1433 printf("\t\tCAS-WL: %d\n",
1434 topology_db->interface_params[ui].cas_wl);
1435 printf("\t\tCAS-L: %d\n",
1436 topology_db->interface_params[ui].cas_l);
1437 printf("\t\tTemperature: %d\n",
1438 topology_db->interface_params[ui].interface_temp);
1439 printf("\n");
1440 for (uj = 0; uj < 4; uj++) {
1441 printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
1442 topology_db->interface_params[ui].
1443 as_bus_params[uj].cs_bitmask);
1444 printf("Mirror: 0x%x\t",
1445 topology_db->interface_params[ui].
1446 as_bus_params[uj].mirror_enable_bitmask);
1447 printf("DQS Swap is %s \t",
1448 (topology_db->
1449 interface_params[ui].as_bus_params[uj].
1450 is_dqs_swap == 1) ? "enabled" : "disabled");
1451 printf("Ck Swap:%s\t",
1452 (topology_db->
1453 interface_params[ui].as_bus_params[uj].
1454 is_ck_swap == 1) ? "enabled" : "disabled");
1455 printf("\n");
1456 }
1457 }
1458}
Chris Packham1a07d212018-05-10 13:28:29 +12001459#endif /* DDR_VIEWER_TOOL */
Stefan Roese5ffceb82015-03-26 15:36:56 +01001460
Chris Packham1a07d212018-05-10 13:28:29 +12001461#if !defined(EXCLUDE_SWITCH_DEBUG)
Stefan Roese5ffceb82015-03-26 15:36:56 +01001462/*
1463 * Execute XSB Test transaction (rd/wr/both)
1464 */
1465int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
1466 u32 read_type, u32 burst_length)
1467{
1468 u32 seq = 0, if_id = 0, addr, cnt;
1469 int ret = MV_OK, ret_tmp;
1470 u32 data_read[MAX_INTERFACE_NUM];
Chris Packham1a07d212018-05-10 13:28:29 +12001471 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
Stefan Roese5ffceb82015-03-26 15:36:56 +01001472
1473 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +12001474 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
Stefan Roese5ffceb82015-03-26 15:36:56 +01001475 addr = mem_addr;
1476 for (cnt = 0; cnt <= burst_length; cnt++) {
1477 seq = (seq + 1) % 8;
1478 if (write_type != 0) {
1479 CHECK_STATUS(ddr3_tip_ext_write
1480 (dev_num, if_id, addr, 1,
1481 xsb_test_table[seq]));
1482 }
1483 if (read_type != 0) {
1484 CHECK_STATUS(ddr3_tip_ext_read
1485 (dev_num, if_id, addr, 1,
1486 data_read));
1487 }
1488 if ((read_type != 0) && (write_type != 0)) {
1489 ret_tmp =
1490 ddr3_tip_compare(if_id,
1491 xsb_test_table[seq],
1492 data_read,
1493 0xff);
1494 addr += (EXT_ACCESS_BURST_LENGTH * 4);
1495 ret = (ret != MV_OK) ? ret : ret_tmp;
1496 }
1497 }
1498 }
1499
1500 return ret;
1501}
1502
#else /*EXCLUDE_SWITCH_DEBUG */

u32 start_xsb_offset = 0;

/* stub: XSB testing is compiled out when EXCLUDE_SWITCH_DEBUG is set */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	return MV_OK;
}

#endif /* EXCLUDE_SWITCH_DEBUG */