// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"
#include "mv_ddr_training_db.h"
#include "mv_ddr_regs.h"

u8 is_reg_dump = 0;
u8 debug_pbs = DEBUG_LEVEL_ERROR;

/*
 * API to change debug log flags from outside the library
 */
#if defined(SILENT_LIB)
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	/* do nothing */
}
#else /* SILENT_LIB */
/* Debug flags for other Training modules */
u8 debug_training_static = DEBUG_LEVEL_ERROR;
u8 debug_training = DEBUG_LEVEL_ERROR;
u8 debug_leveling = DEBUG_LEVEL_ERROR;
u8 debug_centralization = DEBUG_LEVEL_ERROR;
u8 debug_training_ip = DEBUG_LEVEL_ERROR;
u8 debug_training_bist = DEBUG_LEVEL_ERROR;
u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
u8 debug_training_access = DEBUG_LEVEL_ERROR;
u8 debug_training_device = DEBUG_LEVEL_ERROR;

#if defined(CONFIG_DDR4)
u8 debug_tap_tuning = DEBUG_LEVEL_ERROR;
u8 debug_calibration = DEBUG_LEVEL_ERROR;
u8 debug_ddr4_centralization = DEBUG_LEVEL_ERROR;
u8 debug_dm_tuning = DEBUG_LEVEL_ERROR;
#endif /* CONFIG_DDR4 */

void mv_ddr_user_log_level_set(enum ddr_lib_debug_block block)
{
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	ddr3_hws_set_log_level(block, tm->debug_level);
}

void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	switch (block) {
	case DEBUG_BLOCK_STATIC:
		debug_training_static = level;
		break;
	case DEBUG_BLOCK_TRAINING_MAIN:
		debug_training = level;
		break;
	case DEBUG_BLOCK_LEVELING:
		debug_leveling = level;
		break;
	case DEBUG_BLOCK_CENTRALIZATION:
		debug_centralization = level;
		break;
	case DEBUG_BLOCK_PBS:
		debug_pbs = level;
		break;
	case DEBUG_BLOCK_ALG:
		debug_training_hw_alg = level;
		break;
	case DEBUG_BLOCK_DEVICE:
		debug_training_device = level;
		break;
	case DEBUG_BLOCK_ACCESS:
		debug_training_access = level;
		break;
	case DEBUG_STAGES_REG_DUMP:
		if (level == DEBUG_LEVEL_TRACE)
			is_reg_dump = 1;
		else
			is_reg_dump = 0;
		break;
#if defined(CONFIG_DDR4)
	case DEBUG_TAP_TUNING_ENGINE:
		debug_tap_tuning = level;
		break;
	case DEBUG_BLOCK_CALIBRATION:
		debug_calibration = level;
		break;
	case DEBUG_BLOCK_DDR4_CENTRALIZATION:
		debug_ddr4_centralization = level;
		break;
#endif /* CONFIG_DDR4 */
	case DEBUG_BLOCK_ALL:
	default:
		debug_training_static = level;
		debug_training = level;
		debug_leveling = level;
		debug_centralization = level;
		debug_pbs = level;
		debug_training_hw_alg = level;
		debug_training_access = level;
		debug_training_device = level;
#if defined(CONFIG_DDR4)
		debug_tap_tuning = level;
		debug_calibration = level;
		debug_ddr4_centralization = level;
#endif /* CONFIG_DDR4 */
	}
}
#endif /* SILENT_LIB */

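/*
 * Illustrative usage (not exercised in this file), assuming the library
 * was built without SILENT_LIB: raise the verbosity of all training
 * blocks at run time.
 *
 *	ddr3_hws_set_log_level(DEBUG_BLOCK_ALL, DEBUG_LEVEL_INFO);
 */
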
#if defined(DDR_VIEWER_TOOL)
static char *convert_freq(enum mv_ddr_freq freq);
#if defined(EXCLUDE_SWITCH_DEBUG)
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
u32 ctrl_adll1[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
u32 ctrl_level_phase[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
#endif /* EXCLUDE_SWITCH_DEBUG */

static u8 is_validate_window_per_if = 0;
static u8 is_validate_window_per_pup = 0;
static u8 sweep_cnt = 1;
static u8 is_run_leveling_sweep_tests;
#endif /* DDR_VIEWER_TOOL */

struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];

static struct hws_xsb_info xsb_info[MAX_DEVICE_NUM];

/*
 * Dump Dunit & Phy registers
 */
int ddr3_tip_reg_dump(u32 dev_num)
{
	u32 if_id, reg_addr, data_value, bus_id;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	printf("-- dunit registers --\n");
	for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			CHECK_STATUS(ddr3_tip_if_read
				     (dev_num, ACCESS_TYPE_UNICAST,
				      if_id, reg_addr, read_data,
				      MASK_ALL_BITS));
			printf("0x%x ", read_data[if_id]);
		}
		printf("\n");
	}

	printf("-- Phy registers --\n");
	for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
		printf("0x%x ", reg_addr);
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			for (bus_id = 0;
			     bus_id < octets_per_if_num;
			     bus_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_DATA, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
			for (bus_id = 0;
			     bus_id < octets_per_if_num;
			     bus_id++) {
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
				CHECK_STATUS(ddr3_tip_bus_read
					     (dev_num, if_id,
					      ACCESS_TYPE_UNICAST, bus_id,
					      DDR_PHY_CONTROL, reg_addr,
					      &data_value));
				printf("0x%x ", data_value);
			}
		}
		printf("\n");
	}

	return MV_OK;
}
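
/*
 * Illustrative call (debug aid only), assuming device 0: dumps Dunit
 * registers 0x1400-0x19ec per active interface and PHY data/control
 * registers 0x0-0xff per active octet.
 *
 *	ddr3_tip_reg_dump(0);
 */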

/*
 * Register access func registration
 */
int ddr3_tip_init_config_func(u32 dev_num,
			      struct hws_tip_config_func_db *config_func)
{
	if (config_func == NULL)
		return MV_BAD_PARAM;

	memcpy(&config_func_info[dev_num], config_func,
	       sizeof(struct hws_tip_config_func_db));

	return MV_OK;
}
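
/*
 * Illustrative registration sketch; the callback implementations are
 * platform specific and not part of this file (my_get_device_info is a
 * hypothetical helper):
 *
 *	struct hws_tip_config_func_db funcs = {
 *		.tip_get_device_info_func = my_get_device_info,
 *	};
 *	ddr3_tip_init_config_func(0, &funcs);
 */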

/*
 * Get training result info pointer
 */
enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
{
	return training_result[stage];
}

/*
 * Device info read
 */
int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
{
	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
		return config_func_info[dev_num].
			tip_get_device_info_func((u8)dev_num, info_ptr);
	}

	return MV_FAIL;
}
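
/*
 * Note: ddr3_tip_get_device_info() returns MV_FAIL only when no
 * tip_get_device_info_func callback was registered through
 * ddr3_tip_init_config_func() for the given device.
 */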

#if defined(DDR_VIEWER_TOOL)
/*
 * Convert freq to character string
 */
static char *convert_freq(enum mv_ddr_freq freq)
{
	switch (freq) {
	case MV_DDR_FREQ_LOW_FREQ:
		return "MV_DDR_FREQ_LOW_FREQ";

#if !defined(CONFIG_DDR4)
	case MV_DDR_FREQ_400:
		return "400";

	case MV_DDR_FREQ_533:
		return "533";
#endif /* CONFIG_DDR4 */

	case MV_DDR_FREQ_667:
		return "667";

	case MV_DDR_FREQ_800:
		return "800";

	case MV_DDR_FREQ_933:
		return "933";

	case MV_DDR_FREQ_1066:
		return "1066";

#if !defined(CONFIG_DDR4)
	case MV_DDR_FREQ_311:
		return "311";

	case MV_DDR_FREQ_333:
		return "333";

	case MV_DDR_FREQ_467:
		return "467";

	case MV_DDR_FREQ_850:
		return "850";

	case MV_DDR_FREQ_900:
		return "900";

	case MV_DDR_FREQ_360:
		return "MV_DDR_FREQ_360";

	case MV_DDR_FREQ_1000:
		return "MV_DDR_FREQ_1000";
#endif /* CONFIG_DDR4 */

	default:
		return "Unknown Frequency";
	}
}

/*
 * Convert device ID to character string
 */
static char *convert_dev_id(u32 dev_id)
{
	switch (dev_id) {
	case 0x6800:
		return "A38xx";
	case 0x6900:
		return "A39XX";
	case 0xf400:
		return "AC3";
	case 0xfc00:
		return "BC2";

	default:
		return "Unknown Device";
	}
}

/*
 * Convert memory size to character string
 */
static char *convert_mem_size(u32 dev_id)
{
	switch (dev_id) {
	case 0:
		return "512 MB";
	case 1:
		return "1 GB";
	case 2:
		return "2 GB";
	case 3:
		return "4 GB";
	case 4:
		return "8 GB";

	default:
		return "wrong mem size";
	}
}

int print_device_info(u8 dev_num)
{
	struct ddr3_device_info info_ptr;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
	printf("=== DDR setup START===\n");
	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
	printf("\tDDR3 CK delay: %d\n", info_ptr.ck_delay);
	print_topology(tm);
	printf("=== DDR setup END===\n");

	return MV_OK;
}

void hws_ddr3_tip_sweep_test(int enable)
{
	if (enable) {
		is_validate_window_per_if = 1;
		is_validate_window_per_pup = 1;
		debug_training = DEBUG_LEVEL_TRACE;
	} else {
		is_validate_window_per_if = 0;
		is_validate_window_per_pup = 0;
	}
}
#endif /* DDR_VIEWER_TOOL */

char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
{
	switch (tune_result) {
	case TEST_FAILED:
		return "FAILED";
	case TEST_SUCCESS:
		return "PASS";
	case NO_TEST_DONE:
		return "NOT COMPLETED";
	default:
		return "UNKNOWN";
	}
}

/*
 * Print log info
 */
int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
{
	u32 if_id = 0;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

#if defined(DDR_VIEWER_TOOL)
	if ((is_validate_window_per_if != 0) ||
	    (is_validate_window_per_pup != 0)) {
		u32 is_pup_log = 0;
		enum mv_ddr_freq freq;

		freq = tm->interface_params[first_active_if].memory_freq;

		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
		printf("===VALIDATE WINDOW LOG START===\n");
		printf("DDR Frequency: %s ======\n", convert_freq(freq));
		/* print sweep windows */
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
#if defined(EXCLUDE_SWITCH_DEBUG)
		if (is_run_leveling_sweep_tests == 1) {
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		}
#endif /* EXCLUDE_SWITCH_DEBUG */
		ddr3_tip_print_all_pbs_result(dev_num);
		ddr3_tip_print_wl_supp_result(dev_num);
		printf("===VALIDATE WINDOW LOG END ===\n");
		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
		ddr3_tip_reg_dump(dev_num);
	}
#endif /* DDR_VIEWER_TOOL */

	/* return early if we won't print anything anyway */
	if (
#if defined(SILENT_LIB)
	    1 ||
#endif
	    debug_training < DEBUG_LEVEL_INFO) {
		return MV_OK;
	}

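	/*
	 * Per-interface summary: one line per training stage enabled in
	 * mask_tune_func, reported as PASS, FAILED or NOT COMPLETED.
	 */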
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("IF %d Status:\n", if_id));

		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tInit Controller: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[INIT_CONTROLLER]
					    [if_id])));
		}
		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLow freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_LOW_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN]
					    [if_id])));
		}
		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tMedium freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_MEDIUM_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN_2]
					    [if_id])));
		}
		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_SUPP]
					    [if_id])));
		}
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_RX]
					    [if_id])));
		}
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_TX]
					    [if_id])));
		}
		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tTarget freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_TARGET_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_TF]
					    [if_id])));
		}
#if !defined(CONFIG_DDR4)
		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING_TF]
					    [if_id])));
		}
#endif /* CONFIG_DDR4 */
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result
					    [WRITE_LEVELING_SUPP_TF]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_RX]
					    [if_id])));
		}
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tVREF_CALIBRATION: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_TX]
					    [if_id])));
		}
#if defined(CONFIG_DDR4)
		if (mask_tune_func & SW_READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tSW RL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SW_READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & RECEIVER_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRX CAL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[RECEIVER_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & WL_PHASE_CORRECTION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL PHASE CORRECT: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WL_PHASE_CORRECTION]
					    [if_id])));
		}
		if (mask_tune_func & DQ_VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tDQ VREF CAL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[DQ_VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & DQ_MAPPING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tDQ MAP: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[DQ_MAPPING]
					    [if_id])));
		}
#endif /* CONFIG_DDR4 */
	}

	return MV_OK;
}

#if !defined(EXCLUDE_DEBUG_PRINTS)
/*
 * Print stability log info
 */
int ddr3_tip_print_stability_log(u32 dev_num)
{
	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
	u32 reg_data;
#if defined(CONFIG_DDR4)
	u32 reg_data1;
#endif /* CONFIG_DDR4 */
	u32 read_data[MAX_INTERFACE_NUM];
	unsigned int max_cs = mv_ddr_cs_num_get();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

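	/*
	 * The log is emitted as comma-separated values: a title row per
	 * active interface, then data rows with temperature, calibration,
	 * leveling, centralization, vref and PBS readings per chip-select
	 * and octet.
	 */
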
	/* Title print */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			printf("\n");
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
#if defined(CONFIG_DDR4)
			printf("DminTx, AreaTx, DminRx, AreaRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, CenTx, CenRx, Vref, DQVref,");
			for (idx = 0; idx < 11; idx++)
				printf("DC-Pad%d,", idx);
#else /* CONFIG_DDR4 */
			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
#endif /* CONFIG_DDR4 */
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSTx-Pad%d,", idx);
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSRx-Pad%d,", idx);
		}
	}
	printf("\n");

	/* Data print */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		printf("Data: %d,%d,", if_id,
		       (config_func_info[dev_num].tip_get_temperature != NULL)
		       ? (config_func_info[dev_num].
			  tip_get_temperature(dev_num)) : (0));

		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
		       ((read_data[if_id] & 0xfc00) >> 10));
		CHECK_STATUS(ddr3_tip_if_read
			     (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
			      read_data, MASK_ALL_BITS));
		printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
		       ((read_data[if_id] & 0xfc00000) >> 22));

		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
				printf("\n");
				VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
#if defined(CONFIG_DDR4)
				/* DminTx, areaTX */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_PHY_REG +
						  csindex, &reg_data);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][0],
						  DDR_PHY_CONTROL,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][1],
						  &reg_data1);
				printf("%d,%d,", 2 * (reg_data1 & 0xFF),
				       reg_data);
				/* DminRx, areaRX */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_PHY_REG +
						  csindex + 4, &reg_data);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][0],
						  DDR_PHY_CONTROL,
						  dmin_phy_reg_table
						  [csindex * 5 + bus_id][1],
						  &reg_data1);
				printf("%d,%d,", 2 * (reg_data1 >> 8),
				       reg_data);
#else /* CONFIG_DDR4 */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  RESULT_PHY_REG +
						  csindex, &reg_data);
				printf("%d,%d,", (reg_data & 0x1f),
				       ((reg_data & 0x3e0) >> 5));
#endif /* CONFIG_DDR4 */
				/* WL */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST,
						  bus_id, DDR_PHY_DATA,
						  WL_PHY_REG(csindex),
						  &reg_data);
				printf("%d,%d,%d,",
				       (reg_data & 0x1f) +
				       ((reg_data & 0x1c0) >> 6) * 32,
				       (reg_data & 0x1f),
				       (reg_data & 0x1c0) >> 6);
				/* RL */
				CHECK_STATUS(ddr3_tip_if_read
					     (dev_num, ACCESS_TYPE_UNICAST,
					      if_id,
					      RD_DATA_SMPL_DLYS_REG,
					      read_data, MASK_ALL_BITS));
				read_data[if_id] =
					(read_data[if_id] &
					 (0x1f << (8 * csindex))) >>
					(8 * csindex);
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  RL_PHY_REG(csindex),
						  &reg_data);
				printf("%d,%d,%d,%d,",
				       (reg_data & 0x1f) +
				       ((reg_data & 0x1c0) >> 6) * 32 +
				       read_data[if_id] * 64,
				       (reg_data & 0x1f),
				       ((reg_data & 0x1c0) >> 6),
				       read_data[if_id]);
				/* Centralization */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  CTX_PHY_REG(csindex),
						  &reg_data);
				printf("%d,", (reg_data & 0x3f));
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  CRX_PHY_REG(csindex),
						  &reg_data);
				printf("%d,", (reg_data & 0x1f));
				/* Vref */
				ddr3_tip_bus_read(dev_num, if_id,
						  ACCESS_TYPE_UNICAST, bus_id,
						  DDR_PHY_DATA,
						  PAD_CFG_PHY_REG,
						  &reg_data);
				printf("%d,", (reg_data & 0x7));
				/* DQVref */
				/* Need to add the Read Function from device */
				printf("%d,", 0);
#if defined(CONFIG_DDR4)
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0xd0 + 12 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
#endif /* CONFIG_DDR4 */
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0x10 +
							  16 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
				printf("\t\t");
				for (idx = 0; idx < 11; idx++) {
					ddr3_tip_bus_read(dev_num, if_id,
							  ACCESS_TYPE_UNICAST,
							  bus_id, DDR_PHY_DATA,
							  0x50 +
							  16 * csindex +
							  idx, &reg_data);
					printf("%d,", (reg_data & 0x3f));
				}
			}
		}
	}
	printf("\n");

	return MV_OK;
}
#endif /* EXCLUDE_DEBUG_PRINTS */

/*
 * Register XSB information
 */
int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
{
	memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
	return MV_OK;
}

/*
 * Read ADLL Value
 */
int ddr3_tip_read_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			     u32 reg_addr, u32 mask)
{
	u32 data_value;
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 * with CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       ACCESS_TYPE_UNICAST,
						       bus_id,
						       DDR_PHY_DATA, reg_addr,
						       &data_value));
			pup_values[if_id *
				   octets_per_if_num + bus_id] =
				data_value & mask;
		}
	}

	return 0;
}

/*
 * Write ADLL Value
 */
int ddr3_tip_write_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			      u32 reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 data;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 * with CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id *
					  octets_per_if_num +
					  bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id, DDR_PHY_DATA,
							reg_addr, data));
		}
	}

	return 0;
}

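/*
 * Illustrative read-modify-write flow using the two helpers above,
 * assuming 'vals' spans the full interface/octet matrix:
 *
 *	u32 vals[MAX_INTERFACE_NUM * MAX_BUS_NUM];
 *
 *	ddr3_tip_read_adll_value(dev_num, vals, CTX_PHY_REG(cs), MASK_ALL_BITS);
 *	// ... adjust vals as needed ...
 *	ddr3_tip_write_adll_value(dev_num, vals, CTX_PHY_REG(cs));
 */
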
/**
 * Read Phase Value
 */
int read_phase_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
		     int reg_addr, u32 mask)
{
	u32 data_value;
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* multi CS support - reg_addr is calculated in calling function with CS offset */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       ACCESS_TYPE_UNICAST,
						       bus_id,
						       DDR_PHY_DATA, reg_addr,
						       &data_value));
			pup_values[if_id * octets_per_if_num + bus_id] = data_value & mask;
		}
	}

	return 0;
}

/**
 * Write Leveling Value
 */
int write_leveling_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			 u32 pup_ph_values[MAX_INTERFACE_NUM * MAX_BUS_NUM], int reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 data;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* multi CS support - reg_addr is calculated in calling function with CS offset */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0 ; bus_id < octets_per_if_num ; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id * octets_per_if_num + bus_id] +
			       pup_ph_values[if_id * octets_per_if_num + bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id,
							DDR_PHY_DATA,
							reg_addr,
							data));
		}
	}

	return 0;
}

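/*
 * Note: write_leveling_value() recombines the fine ADLL value and the
 * phase bits that ddr3_tip_read_adll_value() and read_phase_value()
 * split out of the same leveling register (see the leveling sweep test
 * below for the intended pairing).
 */
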
#if !defined(EXCLUDE_SWITCH_DEBUG)
struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
u32 start_xsb_offset = 0;
u8 is_rl_old = 0;
u8 is_freq_old = 0;
u8 is_dfs_disabled = 0;
u32 default_centrlization_value = 0x12;
u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
	rl_test = 0, reset_read_fifo = 0;
int debug_acc = 0;
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];

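/*
 * Burst patterns consumed (indexed modulo 8) by run_xsb_test() below:
 * incrementing nibble fills followed by all-zero/all-one word toggles.
 */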
u32 xsb_test_table[][8] = {
	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
	 0x66666666, 0x77777777},
	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
	 0xeeeeeeee, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
	 0xffffffff, 0xffffffff},
	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
	 0x00000000, 0x00000000},
	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
	 0xffffffff, 0xffffffff}
};

int ddr3_tip_print_adll(void)
{
	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < octets_per_if_num;
		     bus_cnt++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id,
				      ACCESS_TYPE_UNICAST, bus_cnt,
				      DDR_PHY_DATA, 0x1, &data_p1));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  (" IF %d bus_cnt %d phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
					   if_id, bus_cnt, data_p1, data_p2,
					   ui_data3));
		}
	}

	return MV_OK;
}

#endif /* EXCLUDE_SWITCH_DEBUG */

#if defined(DDR_VIEWER_TOOL)
/*
 * Print ADLL
 */
int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
{
	u32 i, j;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (j = 0; j < octets_per_if_num; j++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++)
			printf("%d ,", adll[i * octets_per_if_num + j]);
	}
	printf("\n");

	return MV_OK;
}

int print_ph(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
{
	u32 i, j;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (j = 0; j < octets_per_if_num; j++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++)
			printf("%d ,", adll[i * octets_per_if_num + j] >> 6);
	}
	printf("\n");

	return MV_OK;
}
#endif /* DDR_VIEWER_TOOL */

#if !defined(EXCLUDE_SWITCH_DEBUG)
/* byte_index - only byte 0, 1, 2, or 3, 0xff - test all bytes */
static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
			    u32 byte_index)
{
	u32 burst_cnt = 0, addr_offset, i_id;
	int b_is_fail = 0;

	addr_offset =
		(byte_index ==
		 0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
		if ((p_src[burst_cnt] & addr_offset) !=
		    (p_dst[if_id] & addr_offset))
			b_is_fail = 1;
	}

	if (b_is_fail == 1) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("IF %d exp: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("0x%8x ", p_src[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("\n_i_f %d rcv: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("(0x%8x ", p_dst[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
	}

	return b_is_fail;
}
#endif /* EXCLUDE_SWITCH_DEBUG */

#if defined(DDR_VIEWER_TOOL)
/*
 * Sweep validation
 */
int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
			    u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int if_id = 0;
	u32 adll_value = 0;
	u32 reg;
	enum hws_access_type pup_access;
	u32 cs;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	repeat_num = 2;

	if (mode == 1) {
		/* per pup */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
		reg = (direction == 0) ? CTX_PHY_REG(cs) : CRX_PHY_REG(cs);
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE
					(tm->if_act_mask,
					 if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					ctrl_sweepres[adll][if_id][pup] =
						0;
				}
			}
		}

		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
			ctrl_adll[adll] = 0;
		/* Save DQS value(after algorithm run) */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll,
					 reg, MASK_ALL_BITS);

		/*
		 * Sweep ADLL from 0:31 on all I/F on all Pup and perform
		 * BIST on each stage.
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
					     pattern_idx++) {
						adll_value =
							(direction == 0) ? (adll * 2) : adll;
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num, ACCESS_TYPE_MULTICAST, 0,
							      pup_access, pup, DDR_PHY_DATA,
							      reg, adll_value));
						hws_ddr3_run_bist(dev_num, sweep_pattern, res,
								  cs);
						/* ddr3_tip_reset_fifo_ptr(dev_num); */
						for (if_id = 0;
						     if_id < MAX_INTERFACE_NUM;
						     if_id++) {
							VALIDATE_IF_ACTIVE
								(tm->if_act_mask,
								 if_id);
							ctrl_sweepres[adll][if_id][pup]
								+= res[if_id];
							if (mode == 1) {
								CHECK_STATUS
									(ddr3_tip_bus_write
									 (dev_num,
									  ACCESS_TYPE_UNICAST,
									  if_id,
									  ACCESS_TYPE_UNICAST,
									  pup,
									  DDR_PHY_DATA,
									  reg,
									  ctrl_adll[if_id *
										    cs *
										    octets_per_if_num
										    + pup]));
							}
						}
					}
				}
			}
		}
		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
		       ((direction == 0) ? "TX" : "RX"));
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? (adll * 2) : adll;
			printf("Final,%s, Sweep, Result, %d ,",
			       ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					printf("%8d , ",
					       ctrl_sweepres[adll][if_id]
					       [pup]);
				}
			}
			printf("\n");
		}

		/*
		 * Write back to the phy the Rx DQS value, we store in
		 * the beginning.
		 */
		ddr3_tip_write_adll_value(dev_num, ctrl_adll, reg);
		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}

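/*
 * Illustrative invocation of ddr3_tip_run_sweep_test() (normally driven
 * from ddr3_tip_print_log()): direction 0 sweeps the TX ADLL, mode 1
 * reports results per PHY (pup); note that repeat_num is currently
 * forced to 2 inside the function.
 *
 *	ddr3_tip_run_sweep_test(0, sweep_cnt, 0, 1);
 */
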
#if defined(EXCLUDE_SWITCH_DEBUG)
int ddr3_tip_run_leveling_sweep_test(int dev_num, u32 repeat_num,
				     u32 direction, u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0, start_adll = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int if_id = 0, gap = 0;
	u32 adll_value = 0;
	u32 reg;
	enum hws_access_type pup_access;
	u32 cs;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (mode == 1) { /* per pup */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
		reg = (direction == 0) ? WL_PHY_REG(cs) : RL_PHY_REG(cs);
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					ctrl_sweepres[adll][if_id][pup] = 0;
			}
		}

		for (adll = 0; adll < MAX_INTERFACE_NUM * MAX_BUS_NUM; adll++) {
			ctrl_adll[adll] = 0;
			ctrl_level_phase[adll] = 0;
			ctrl_adll1[adll] = 0;
		}

		/* save leveling value after running algorithm */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, 0x1f);
		read_phase_value(dev_num, ctrl_level_phase, reg, 0x7 << 6);

		if (direction == 0)
			ddr3_tip_read_adll_value(dev_num, ctrl_adll1,
						 CTX_PHY_REG(cs), MASK_ALL_BITS);

		/* Sweep ADLL from 0 to 31 on all interfaces, all pups,
		 * and perform BIST on each stage
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					adll_value = (direction == 0) ? (adll * 2) : (adll * 3);
					for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
						start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
							     (ctrl_level_phase[if_id * cs *
									       octets_per_if_num +
									       pup] >> 6) * 32;

						if (direction == 0)
							start_adll = (start_adll > 32) ? (start_adll - 32) : 0;
						else
							start_adll = (start_adll > 48) ? (start_adll - 48) : 0;

						adll_value += start_adll;

						gap = ctrl_adll1[if_id * cs * octets_per_if_num + pup] -
						      ctrl_adll[if_id * cs * octets_per_if_num + pup];
						gap = (((adll_value % 32) + gap) % 64);

						adll_value = ((adll_value % 32) +
							      (((adll_value - (adll_value % 32)) / 32) << 6));

						CHECK_STATUS(ddr3_tip_bus_write(dev_num,
										ACCESS_TYPE_UNICAST,
										if_id,
										pup_access,
										pup,
										DDR_PHY_DATA,
										reg,
										adll_value));
						if (direction == 0)
							CHECK_STATUS(ddr3_tip_bus_write(dev_num,
											ACCESS_TYPE_UNICAST,
											if_id,
											pup_access,
											pup,
											DDR_PHY_DATA,
											CTX_PHY_REG(cs),
											gap));
					}

					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
					     pattern_idx++) {
						hws_ddr3_run_bist(dev_num, sweep_pattern, res, cs);
						ddr3_tip_reset_fifo_ptr(dev_num);
						for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
							VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
							if (pup != 4) { /* TODO: remove literal */
								ctrl_sweepres[adll][if_id][pup] += res[if_id];
							} else {
								CHECK_STATUS(ddr3_tip_if_read(dev_num,
											      ACCESS_TYPE_UNICAST,
											      if_id,
											      0x1458,
											      read_data,
											      MASK_ALL_BITS));
								ctrl_sweepres[adll][if_id][pup] += read_data[if_id];
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
											       if_id,
											       0x1458,
											       0x0,
											       0xFFFFFFFF));
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
											       if_id,
											       0x145C,
											       0x0,
											       0xFFFFFFFF));
							}
						}
					}
				}
			}

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
					     ctrl_level_phase[if_id * cs * octets_per_if_num + pup];
				CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, pup_access, pup,
								DDR_PHY_DATA, reg, start_adll));
				if (direction == 0)
					CHECK_STATUS(ddr3_tip_bus_write(dev_num,
									ACCESS_TYPE_UNICAST,
									if_id,
									pup_access,
									pup,
									DDR_PHY_DATA,
									CTX_PHY_REG(cs),
									ctrl_adll1[if_id *
										   cs *
										   octets_per_if_num +
										   pup]));
			}
		}

		printf("Final,CS %d,%s,Leveling,Result,Adll,", cs, ((direction == 0) ? "TX" : "RX"));

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? ((adll * 2) - 32) : ((adll * 3) - 48);
			printf("Final,%s,LevelingSweep,Result, %d ,", ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					printf("%8d , ", ctrl_sweepres[adll][if_id][pup]);
			}
			printf("\n");
		}

		/* write back to the phy the Rx DQS value, we store in the beginning */
		write_leveling_value(dev_num, ctrl_adll, ctrl_level_phase, reg);
		if (direction == 0)
			ddr3_tip_write_adll_value(dev_num, ctrl_adll1, CTX_PHY_REG(cs));

		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s,DQS,Leveling,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
		print_ph(dev_num, ctrl_level_phase);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}
#endif /* EXCLUDE_SWITCH_DEBUG */

void print_topology(struct mv_ddr_topology_map *topology_db)
{
	u32 ui, uj;
	u32 dev_num = 0;

	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
	printf("\tNumber of buses: 0x%x\n",
	       ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE));
	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);

	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
		VALIDATE_IF_ACTIVE(topology_db->if_act_mask, ui);
		printf("\n\tInterface ID: %d\n", ui);
		printf("\t\tDDR Frequency: %s\n",
		       convert_freq(topology_db->
				    interface_params[ui].memory_freq));
		printf("\t\tSpeed_bin: %d\n",
		       topology_db->interface_params[ui].speed_bin_index);
		printf("\t\tBus_width: %d\n",
		       (4 << topology_db->interface_params[ui].bus_width));
		printf("\t\tMem_size: %s\n",
		       convert_mem_size(topology_db->
					interface_params[ui].memory_size));
		printf("\t\tCAS-WL: %d\n",
		       topology_db->interface_params[ui].cas_wl);
		printf("\t\tCAS-L: %d\n",
		       topology_db->interface_params[ui].cas_l);
		printf("\t\tTemperature: %d\n",
		       topology_db->interface_params[ui].interface_temp);
		printf("\n");
		for (uj = 0; uj < 4; uj++) {
			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
			       topology_db->interface_params[ui].
			       as_bus_params[uj].cs_bitmask);
			printf("Mirror: 0x%x\t",
			       topology_db->interface_params[ui].
			       as_bus_params[uj].mirror_enable_bitmask);
			printf("DQS Swap is %s \t",
			       (topology_db->
				interface_params[ui].as_bus_params[uj].
				is_dqs_swap == 1) ? "enabled" : "disabled");
			printf("Ck Swap:%s\t",
			       (topology_db->
				interface_params[ui].as_bus_params[uj].
				is_ck_swap == 1) ? "enabled" : "disabled");
			printf("\n");
		}
	}
}
#endif /* DDR_VIEWER_TOOL */

#if !defined(EXCLUDE_SWITCH_DEBUG)
/*
 * Execute XSB Test transaction (rd/wr/both)
 */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	u32 seq = 0, if_id = 0, addr, cnt;
	int ret = MV_OK, ret_tmp;
	u32 data_read[MAX_INTERFACE_NUM];
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		addr = mem_addr;
		for (cnt = 0; cnt <= burst_length; cnt++) {
			seq = (seq + 1) % 8;
			if (write_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_write
					     (dev_num, if_id, addr, 1,
					      xsb_test_table[seq]));
			}
			if (read_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_read
					     (dev_num, if_id, addr, 1,
					      data_read));
			}
			if ((read_type != 0) && (write_type != 0)) {
				ret_tmp =
					ddr3_tip_compare(if_id,
							 xsb_test_table[seq],
							 data_read,
							 0xff);
				addr += (EXT_ACCESS_BURST_LENGTH * 4);
				ret = (ret != MV_OK) ? ret : ret_tmp;
			}
		}
	}

	return ret;
}

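/*
 * Illustrative smoke test, assuming the window at mem_addr may be
 * overwritten: write and read back bursts at 0x1000 on every active
 * interface and compare the data.
 *
 *	run_xsb_test(0, 0x1000, 1, 1, 3);
 */
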
#else /* EXCLUDE_SWITCH_DEBUG */
u32 start_xsb_offset = 0;

int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	return MV_OK;
}

#endif /* EXCLUDE_SWITCH_DEBUG */