// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#include "ddr3_init.h"
#include "mv_ddr_training_db.h"
#include "mv_ddr_regs.h"

#if !defined(CONFIG_DDR_IMMUTABLE_DEBUG_SETTINGS)
u8 is_reg_dump = 0;
u8 debug_pbs = DEBUG_LEVEL_ERROR;
#endif

/*
 * API to change flags outside of the lib
 */
#if defined(SILENT_LIB) || defined(CONFIG_DDR_IMMUTABLE_DEBUG_SETTINGS)
void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	/* do nothing */
}
#else /* !SILENT_LIB && !CONFIG_DDR_IMMUTABLE_DEBUG_SETTINGS */

/* Debug flags for other Training modules */
u8 debug_training_static = DEBUG_LEVEL_ERROR;
u8 debug_training = DEBUG_LEVEL_ERROR;
u8 debug_leveling = DEBUG_LEVEL_ERROR;
u8 debug_centralization = DEBUG_LEVEL_ERROR;
u8 debug_training_ip = DEBUG_LEVEL_ERROR;
u8 debug_training_bist = DEBUG_LEVEL_ERROR;
u8 debug_training_hw_alg = DEBUG_LEVEL_ERROR;
u8 debug_training_access = DEBUG_LEVEL_ERROR;
u8 debug_training_device = DEBUG_LEVEL_ERROR;

#if defined(CONFIG_DDR4)
u8 debug_tap_tuning = DEBUG_LEVEL_ERROR;
u8 debug_calibration = DEBUG_LEVEL_ERROR;
u8 debug_ddr4_centralization = DEBUG_LEVEL_ERROR;
u8 debug_dm_tuning = DEBUG_LEVEL_ERROR;
#endif /* CONFIG_DDR4 */

43void mv_ddr_user_log_level_set(enum ddr_lib_debug_block block)
44{
45 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
46 ddr3_hws_set_log_level(block, tm->debug_level);
47};

void ddr3_hws_set_log_level(enum ddr_lib_debug_block block, u8 level)
{
	switch (block) {
	case DEBUG_BLOCK_STATIC:
		debug_training_static = level;
		break;
	case DEBUG_BLOCK_TRAINING_MAIN:
		debug_training = level;
		break;
	case DEBUG_BLOCK_LEVELING:
		debug_leveling = level;
		break;
	case DEBUG_BLOCK_CENTRALIZATION:
		debug_centralization = level;
		break;
	case DEBUG_BLOCK_PBS:
		debug_pbs = level;
		break;
	case DEBUG_BLOCK_ALG:
		debug_training_hw_alg = level;
		break;
	case DEBUG_BLOCK_DEVICE:
		debug_training_device = level;
		break;
	case DEBUG_BLOCK_ACCESS:
		debug_training_access = level;
		break;
	case DEBUG_STAGES_REG_DUMP:
		if (level == DEBUG_LEVEL_TRACE)
			is_reg_dump = 1;
		else
			is_reg_dump = 0;
		break;
#if defined(CONFIG_DDR4)
	case DEBUG_TAP_TUNING_ENGINE:
		debug_tap_tuning = level;
		break;
	case DEBUG_BLOCK_CALIBRATION:
		debug_calibration = level;
		break;
	case DEBUG_BLOCK_DDR4_CENTRALIZATION:
		debug_ddr4_centralization = level;
		break;
#endif /* CONFIG_DDR4 */
	case DEBUG_BLOCK_ALL:
	default:
		debug_training_static = level;
		debug_training = level;
		debug_leveling = level;
		debug_centralization = level;
		debug_pbs = level;
		debug_training_hw_alg = level;
		debug_training_access = level;
		debug_training_device = level;
#if defined(CONFIG_DDR4)
		debug_tap_tuning = level;
		debug_calibration = level;
		debug_ddr4_centralization = level;
#endif /* CONFIG_DDR4 */
	}
}
#endif /* !SILENT_LIB && !CONFIG_DDR_IMMUTABLE_DEBUG_SETTINGS */

#if defined(DDR_VIEWER_TOOL)
static char *convert_freq(enum mv_ddr_freq freq);
#if defined(EXCLUDE_SWITCH_DEBUG)
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
u32 ctrl_adll1[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
u32 ctrl_level_phase[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];
#endif /* EXCLUDE_SWITCH_DEBUG */

static u8 is_validate_window_per_if = 0;
static u8 is_validate_window_per_pup = 0;
static u8 sweep_cnt = 1;
static u8 is_run_leveling_sweep_tests;
#endif /* DDR_VIEWER_TOOL */

struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];

static struct hws_xsb_info xsb_info[MAX_DEVICE_NUM];

/*
 * Dump Dunit & Phy registers
 */
int ddr3_tip_reg_dump(u32 dev_num)
{
	u32 if_id, reg_addr, data_value, bus_id;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

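	/*
	 * Dump the Dunit register file (0x1400-0x19ec) for every active
	 * interface, then registers 0x00-0xff of each active data and
	 * control PHY.
	 */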
141 printf("-- dunit registers --\n");
142 for (reg_addr = 0x1400; reg_addr < 0x19f0; reg_addr += 4) {
143 printf("0x%x ", reg_addr);
144 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +1200145 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100146 CHECK_STATUS(ddr3_tip_if_read
147 (dev_num, ACCESS_TYPE_UNICAST,
148 if_id, reg_addr, read_data,
149 MASK_ALL_BITS));
150 printf("0x%x ", read_data[if_id]);
151 }
152 printf("\n");
153 }
154
155 printf("-- Phy registers --\n");
156 for (reg_addr = 0; reg_addr <= 0xff; reg_addr++) {
157 printf("0x%x ", reg_addr);
158 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +1200159 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100160 for (bus_id = 0;
Chris Packham1a07d212018-05-10 13:28:29 +1200161 bus_id < octets_per_if_num;
Stefan Roese5ffceb82015-03-26 15:36:56 +0100162 bus_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +1200163 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100164 CHECK_STATUS(ddr3_tip_bus_read
165 (dev_num, if_id,
166 ACCESS_TYPE_UNICAST, bus_id,
167 DDR_PHY_DATA, reg_addr,
168 &data_value));
169 printf("0x%x ", data_value);
170 }
171 for (bus_id = 0;
Chris Packham1a07d212018-05-10 13:28:29 +1200172 bus_id < octets_per_if_num;
Stefan Roese5ffceb82015-03-26 15:36:56 +0100173 bus_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +1200174 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100175 CHECK_STATUS(ddr3_tip_bus_read
176 (dev_num, if_id,
177 ACCESS_TYPE_UNICAST, bus_id,
178 DDR_PHY_CONTROL, reg_addr,
179 &data_value));
180 printf("0x%x ", data_value);
181 }
182 }
183 printf("\n");
184 }
185
186 return MV_OK;
187}

/*
 * Register access func registration
 */
int ddr3_tip_init_config_func(u32 dev_num,
			      struct hws_tip_config_func_db *config_func)
{
	if (config_func == NULL)
		return MV_BAD_PARAM;

	memcpy(&config_func_info[dev_num], config_func,
	       sizeof(struct hws_tip_config_func_db));

	return MV_OK;
}

/*
 * Get training result info pointer
 */
enum hws_result *ddr3_tip_get_result_ptr(u32 stage)
{
	return training_result[stage];
}

/*
 * Device info read
 */
int ddr3_tip_get_device_info(u32 dev_num, struct ddr3_device_info *info_ptr)
{
	if (config_func_info[dev_num].tip_get_device_info_func != NULL) {
		return config_func_info[dev_num].
			tip_get_device_info_func((u8)dev_num, info_ptr);
	}

	return MV_FAIL;
}

#if defined(DDR_VIEWER_TOOL)
/*
 * Convert freq to character string
 */
static char *convert_freq(enum mv_ddr_freq freq)
{
	switch (freq) {
	case MV_DDR_FREQ_LOW_FREQ:
		return "MV_DDR_FREQ_LOW_FREQ";

#if !defined(CONFIG_DDR4)
	case MV_DDR_FREQ_400:
		return "400";

	case MV_DDR_FREQ_533:
		return "533";
#endif /* CONFIG_DDR4 */

	case MV_DDR_FREQ_667:
		return "667";

	case MV_DDR_FREQ_800:
		return "800";

	case MV_DDR_FREQ_933:
		return "933";

	case MV_DDR_FREQ_1066:
		return "1066";

#if !defined(CONFIG_DDR4)
	case MV_DDR_FREQ_311:
		return "311";

	case MV_DDR_FREQ_333:
		return "333";

	case MV_DDR_FREQ_467:
		return "467";

	case MV_DDR_FREQ_850:
		return "850";

	case MV_DDR_FREQ_900:
		return "900";

	case MV_DDR_FREQ_360:
		return "MV_DDR_FREQ_360";

	case MV_DDR_FREQ_1000:
		return "MV_DDR_FREQ_1000";
#endif /* CONFIG_DDR4 */

	default:
		return "Unknown Frequency";
	}
}

/*
 * Convert device ID to character string
 */
static char *convert_dev_id(u32 dev_id)
{
	switch (dev_id) {
	case 0x6800:
		return "A38xx";
	case 0x6900:
		return "A39XX";
	case 0xf400:
		return "AC3";
	case 0xfc00:
		return "BC2";

	default:
		return "Unknown Device";
	}
}

/*
 * Convert memory size to character string
 */
static char *convert_mem_size(u32 dev_id)
{
	switch (dev_id) {
	case 0:
		return "512 MB";
	case 1:
		return "1 GB";
	case 2:
		return "2 GB";
	case 3:
		return "4 GB";
	case 4:
		return "8 GB";

	default:
		return "wrong mem size";
	}
}

int print_device_info(u8 dev_num)
{
	struct ddr3_device_info info_ptr;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	CHECK_STATUS(ddr3_tip_get_device_info(dev_num, &info_ptr));
	printf("=== DDR setup START===\n");
	printf("\tDevice ID: %s\n", convert_dev_id(info_ptr.device_id));
	printf("\tDDR3 CK delay: %d\n", info_ptr.ck_delay);
	print_topology(tm);
	printf("=== DDR setup END===\n");

	return MV_OK;
}

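/*
 * Enable/disable the sweep validation flags consumed by
 * ddr3_tip_print_log(); enabling also raises the main training debug
 * level to TRACE.
 */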
void hws_ddr3_tip_sweep_test(int enable)
{
	if (enable) {
		is_validate_window_per_if = 1;
		is_validate_window_per_pup = 1;
		debug_training = DEBUG_LEVEL_TRACE;
	} else {
		is_validate_window_per_if = 0;
		is_validate_window_per_pup = 0;
	}
}
#endif /* DDR_VIEWER_TOOL */

char *ddr3_tip_convert_tune_result(enum hws_result tune_result)
{
	switch (tune_result) {
	case TEST_FAILED:
		return "FAILED";
	case TEST_SUCCESS:
		return "PASS";
	case NO_TEST_DONE:
		return "NOT COMPLETED";
	default:
		return "Un-KNOWN";
	}
}

/*
 * Print log info
 */
int ddr3_tip_print_log(u32 dev_num, u32 mem_addr)
{
	u32 if_id = 0;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

#if defined(DDR_VIEWER_TOOL)
	if ((is_validate_window_per_if != 0) ||
	    (is_validate_window_per_pup != 0)) {
		u32 is_pup_log = 0;
		enum mv_ddr_freq freq;

		freq = tm->interface_params[first_active_if].memory_freq;

		is_pup_log = (is_validate_window_per_pup != 0) ? 1 : 0;
		printf("===VALIDATE WINDOW LOG START===\n");
		printf("DDR Frequency: %s ======\n", convert_freq(freq));
		/* print sweep windows */
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		ddr3_tip_run_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
#if defined(EXCLUDE_SWITCH_DEBUG)
		if (is_run_leveling_sweep_tests == 1) {
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 0, is_pup_log);
			ddr3_tip_run_leveling_sweep_test(dev_num, sweep_cnt, 1, is_pup_log);
		}
#endif /* EXCLUDE_SWITCH_DEBUG */
		ddr3_tip_print_all_pbs_result(dev_num);
		ddr3_tip_print_wl_supp_result(dev_num);
		printf("===VALIDATE WINDOW LOG END ===\n");
		CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
		ddr3_tip_reg_dump(dev_num);
	}
#endif /* DDR_VIEWER_TOOL */

	/* return early if we won't print anything anyway */
	if (
#if defined(SILENT_LIB)
	    1 ||
#endif
	    debug_training < DEBUG_LEVEL_INFO) {
		return MV_OK;
	}

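	/* Report the result of each executed training stage per active interface */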
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
				  ("IF %d Status:\n", if_id));

		if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tInit Controller: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[INIT_CONTROLLER]
					    [if_id])));
		}
		if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLow freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_LOW_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN]
					    [if_id])));
		}
		if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tMedium freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_MEDIUM_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tLoad Pattern: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[LOAD_PATTERN_2]
					    [if_id])));
		}
		if (mask_tune_func & READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_SUPP]
					    [if_id])));
		}
		if (mask_tune_func & PBS_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_RX]
					    [if_id])));
		}
		if (mask_tune_func & PBS_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tPBS TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[PBS_TX]
					    [if_id])));
		}
		if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tTarget freq Config: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SET_TARGET_FREQ]
					    [if_id])));
		}
		if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WRITE_LEVELING_TF]
					    [if_id])));
		}
#if !defined(CONFIG_DDR4)
		if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[READ_LEVELING_TF]
					    [if_id])));
		}
#endif /* CONFIG_DDR4 */
		if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL TF Supp: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result
					    [WRITE_LEVELING_SUPP_TF]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr RX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_RX]
					    [if_id])));
		}
		if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tVREF_CALIBRATION: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tCentr TX: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[CENTRALIZATION_TX]
					    [if_id])));
		}
#if defined(CONFIG_DDR4)
		if (mask_tune_func & SW_READ_LEVELING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tSW RL TF: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[SW_READ_LEVELING]
					    [if_id])));
		}
		if (mask_tune_func & RECEIVER_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tRX CAL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[RECEIVER_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & WL_PHASE_CORRECTION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tWL PHASE CORRECT: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[WL_PHASE_CORRECTION]
					    [if_id])));
		}
		if (mask_tune_func & DQ_VREF_CALIBRATION_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tDQ VREF CAL: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[DQ_VREF_CALIBRATION]
					    [if_id])));
		}
		if (mask_tune_func & DQ_MAPPING_MASK_BIT) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
					  ("\tDQ MAP: %s\n",
					   ddr3_tip_convert_tune_result
					   (training_result[DQ_MAPPING]
					    [if_id])));
		}
#endif /* CONFIG_DDR4 */
	}

	return MV_OK;
}

#if !defined(EXCLUDE_DEBUG_PRINTS)
/*
 * Print stability log info
 */
int ddr3_tip_print_stability_log(u32 dev_num)
{
	u8 if_id = 0, csindex = 0, bus_id = 0, idx = 0;
	u32 reg_data;
#if defined(CONFIG_DDR4)
	u32 reg_data1;
#endif /* CONFIG_DDR4 */
	u32 read_data[MAX_INTERFACE_NUM];
	unsigned int max_cs = mv_ddr_cs_num_get();
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* Title print */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		printf("Title: I/F# , Tj, Calibration_n0, Calibration_p0, Calibration_n1, Calibration_p1, Calibration_n2, Calibration_p2,");
		for (csindex = 0; csindex < max_cs; csindex++) {
			printf("CS%d , ", csindex);
			printf("\n");
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
#if defined(CONFIG_DDR4)
			printf("DminTx, AreaTx, DminRx, AreaRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, CenTx, CenRx, Vref, DQVref,");
			for (idx = 0; idx < 11; idx++)
				printf("DC-Pad%d,", idx);
#else /* CONFIG_DDR4 */
			printf("VWTx, VWRx, WL_tot, WL_ADLL, WL_PH, RL_Tot, RL_ADLL, RL_PH, RL_Smp, Cen_tx, Cen_rx, Vref, DQVref,");
#endif /* CONFIG_DDR4 */
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSTx-Pad%d,", idx);
			printf("\t\t");
			for (idx = 0; idx < 11; idx++)
				printf("PBSRx-Pad%d,", idx);
		}
	}
	printf("\n");

	/* Data print */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		printf("Data: %d,%d,", if_id,
		       (config_func_info[dev_num].tip_get_temperature != NULL)
		       ? (config_func_info[dev_num].
			  tip_get_temperature(dev_num)) : (0));

632 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8,
633 read_data, MASK_ALL_BITS));
634 printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
635 ((read_data[if_id] & 0xfc00) >> 10));
636 CHECK_STATUS(ddr3_tip_if_read
637 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8,
638 read_data, MASK_ALL_BITS));
639 printf("%d,%d,", ((read_data[if_id] & 0x3f0) >> 4),
640 ((read_data[if_id] & 0xfc00) >> 10));
641 CHECK_STATUS(ddr3_tip_if_read
642 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
643 read_data, MASK_ALL_BITS));
644 printf("%d,%d,", ((read_data[if_id] & 0x3f0000) >> 16),
645 ((read_data[if_id] & 0xfc00000) >> 22));
646
647 for (csindex = 0; csindex < max_cs; csindex++) {
648 printf("CS%d , ", csindex);
649 for (bus_id = 0; bus_id < MAX_BUS_NUM; bus_id++) {
650 printf("\n");
Chris Packham1a07d212018-05-10 13:28:29 +1200651 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
Tony Dinhe2c524b2023-01-18 19:03:04 -0800652#if defined(CONFIG_DDR4)
653 /* DminTx, areaTX */
654 ddr3_tip_bus_read(dev_num, if_id,
655 ACCESS_TYPE_UNICAST,
656 bus_id, DDR_PHY_DATA,
657 RESULT_PHY_REG +
658 csindex, &reg_data);
659 ddr3_tip_bus_read(dev_num, if_id,
660 ACCESS_TYPE_UNICAST,
661 dmin_phy_reg_table
662 [csindex * 5 + bus_id][0],
663 DDR_PHY_CONTROL,
664 dmin_phy_reg_table
665 [csindex * 5 + bus_id][1],
666 &reg_data1);
667 printf("%d,%d,", 2 * (reg_data1 & 0xFF),
668 reg_data);
669 /* DminRx, areaRX */
670 ddr3_tip_bus_read(dev_num, if_id,
671 ACCESS_TYPE_UNICAST,
672 bus_id, DDR_PHY_DATA,
673 RESULT_PHY_REG +
674 csindex + 4, &reg_data);
675 ddr3_tip_bus_read(dev_num, if_id,
676 ACCESS_TYPE_UNICAST,
677 dmin_phy_reg_table
678 [csindex * 5 + bus_id][0],
679 DDR_PHY_CONTROL,
680 dmin_phy_reg_table
681 [csindex * 5 + bus_id][1],
682 &reg_data1);
683 printf("%d,%d,", 2 * (reg_data1 >> 8),
684 reg_data);
685#else /* CONFIG_DDR4 */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100686 ddr3_tip_bus_read(dev_num, if_id,
687 ACCESS_TYPE_UNICAST,
688 bus_id, DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200689 RESULT_PHY_REG +
Stefan Roese5ffceb82015-03-26 15:36:56 +0100690 csindex, &reg_data);
691 printf("%d,%d,", (reg_data & 0x1f),
692 ((reg_data & 0x3e0) >> 5));
Tony Dinhe2c524b2023-01-18 19:03:04 -0800693#endif /* CONFIG_DDR4 */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100694 /* WL */
695 ddr3_tip_bus_read(dev_num, if_id,
696 ACCESS_TYPE_UNICAST,
697 bus_id, DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200698 WL_PHY_REG(csindex),
699 &reg_data);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100700 printf("%d,%d,%d,",
701 (reg_data & 0x1f) +
702 ((reg_data & 0x1c0) >> 6) * 32,
703 (reg_data & 0x1f),
704 (reg_data & 0x1c0) >> 6);
705 /* RL */
706 CHECK_STATUS(ddr3_tip_if_read
707 (dev_num, ACCESS_TYPE_UNICAST,
708 if_id,
Chris Packham1a07d212018-05-10 13:28:29 +1200709 RD_DATA_SMPL_DLYS_REG,
Stefan Roese5ffceb82015-03-26 15:36:56 +0100710 read_data, MASK_ALL_BITS));
711 read_data[if_id] =
712 (read_data[if_id] &
Chris Packham1a07d212018-05-10 13:28:29 +1200713 (0x1f << (8 * csindex))) >>
714 (8 * csindex);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100715 ddr3_tip_bus_read(dev_num, if_id,
716 ACCESS_TYPE_UNICAST, bus_id,
717 DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200718 RL_PHY_REG(csindex),
Stefan Roese5ffceb82015-03-26 15:36:56 +0100719 &reg_data);
720 printf("%d,%d,%d,%d,",
721 (reg_data & 0x1f) +
722 ((reg_data & 0x1c0) >> 6) * 32 +
723 read_data[if_id] * 64,
724 (reg_data & 0x1f),
725 ((reg_data & 0x1c0) >> 6),
726 read_data[if_id]);
727 /* Centralization */
728 ddr3_tip_bus_read(dev_num, if_id,
729 ACCESS_TYPE_UNICAST, bus_id,
730 DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200731 CTX_PHY_REG(csindex),
732 &reg_data);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100733 printf("%d,", (reg_data & 0x3f));
734 ddr3_tip_bus_read(dev_num, if_id,
735 ACCESS_TYPE_UNICAST, bus_id,
736 DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200737 CRX_PHY_REG(csindex),
738 &reg_data);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100739 printf("%d,", (reg_data & 0x1f));
740 /* Vref */
741 ddr3_tip_bus_read(dev_num, if_id,
742 ACCESS_TYPE_UNICAST, bus_id,
743 DDR_PHY_DATA,
Chris Packham1a07d212018-05-10 13:28:29 +1200744 PAD_CFG_PHY_REG,
Stefan Roese5ffceb82015-03-26 15:36:56 +0100745 &reg_data);
746 printf("%d,", (reg_data & 0x7));
747 /* DQVref */
748 /* Need to add the Read Function from device */
749 printf("%d,", 0);
Tony Dinhe2c524b2023-01-18 19:03:04 -0800750#if defined(CONFIG_DDR4)
751 printf("\t\t");
752 for (idx = 0; idx < 11; idx++) {
753 ddr3_tip_bus_read(dev_num, if_id,
754 ACCESS_TYPE_UNICAST,
755 bus_id, DDR_PHY_DATA,
756 0xd0 + 12 * csindex +
757 idx, &reg_data);
758 printf("%d,", (reg_data & 0x3f));
759 }
760#endif /* CONFIG_DDR4 */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100761 printf("\t\t");
762 for (idx = 0; idx < 11; idx++) {
763 ddr3_tip_bus_read(dev_num, if_id,
764 ACCESS_TYPE_UNICAST,
765 bus_id, DDR_PHY_DATA,
Stefan Roese5ffceb82015-03-26 15:36:56 +0100766 0x10 +
767 16 * csindex +
768 idx, &reg_data);
769 printf("%d,", (reg_data & 0x3f));
770 }
771 printf("\t\t");
772 for (idx = 0; idx < 11; idx++) {
773 ddr3_tip_bus_read(dev_num, if_id,
774 ACCESS_TYPE_UNICAST,
775 bus_id, DDR_PHY_DATA,
776 0x50 +
777 16 * csindex +
778 idx, &reg_data);
779 printf("%d,", (reg_data & 0x3f));
780 }
781 }
782 }
783 }
784 printf("\n");
785
786 return MV_OK;
787}
Chris Packham1a07d212018-05-10 13:28:29 +1200788#endif /* EXCLUDE_DEBUG_PRINTS */
Stefan Roese5ffceb82015-03-26 15:36:56 +0100789
790/*
791 * Register XSB information
792 */
793int ddr3_tip_register_xsb_info(u32 dev_num, struct hws_xsb_info *xsb_info_table)
794{
795 memcpy(&xsb_info[dev_num], xsb_info_table, sizeof(struct hws_xsb_info));
796 return MV_OK;
797}
798
799/*
800 * Read ADLL Value
801 */
Chris Packham1a07d212018-05-10 13:28:29 +1200802int ddr3_tip_read_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
803 u32 reg_addr, u32 mask)
Stefan Roese5ffceb82015-03-26 15:36:56 +0100804{
805 u32 data_value;
806 u32 if_id = 0, bus_id = 0;
Chris Packham1a07d212018-05-10 13:28:29 +1200807 u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
808 struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
Stefan Roese5ffceb82015-03-26 15:36:56 +0100809
810 /*
811 * multi CS support - reg_addr is calucalated in calling function
812 * with CS offset
813 */
814 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +1200815 VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
816 for (bus_id = 0; bus_id < octets_per_if_num;
Stefan Roese5ffceb82015-03-26 15:36:56 +0100817 bus_id++) {
Chris Packham1a07d212018-05-10 13:28:29 +1200818 VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
Stefan Roese5ffceb82015-03-26 15:36:56 +0100819 CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
820 ACCESS_TYPE_UNICAST,
821 bus_id,
822 DDR_PHY_DATA, reg_addr,
823 &data_value));
824 pup_values[if_id *
Chris Packham1a07d212018-05-10 13:28:29 +1200825 octets_per_if_num + bus_id] =
Stefan Roese5ffceb82015-03-26 15:36:56 +0100826 data_value & mask;
827 }
828 }
829
830 return 0;
831}

/*
 * Write ADLL Value
 */
int ddr3_tip_write_adll_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			      u32 reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 data;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/*
	 * multi CS support - reg_addr is calculated in calling function
	 * with CS offset
	 */
	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num;
		     bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id *
					  octets_per_if_num +
					  bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id, DDR_PHY_DATA,
							reg_addr, data));
		}
	}

	return 0;
}

/**
 * Read Phase Value
 */
int read_phase_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
		     int reg_addr, u32 mask)
{
	u32 data_value;
	u32 if_id = 0, bus_id = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* multi CS support - reg_addr is calculated in calling function with CS offset */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			CHECK_STATUS(ddr3_tip_bus_read(dev_num, if_id,
						       ACCESS_TYPE_UNICAST,
						       bus_id,
						       DDR_PHY_DATA, reg_addr,
						       &data_value));
			pup_values[if_id * octets_per_if_num + bus_id] = data_value & mask;
		}
	}

	return 0;
}

/**
 * Write Leveling Value
 */
int write_leveling_value(u32 dev_num, u32 pup_values[MAX_INTERFACE_NUM * MAX_BUS_NUM],
			 u32 pup_ph_values[MAX_INTERFACE_NUM * MAX_BUS_NUM], int reg_addr)
{
	u32 if_id = 0, bus_id = 0;
	u32 data;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	/* multi CS support - reg_addr is calculated in calling function with CS offset */
	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_id = 0; bus_id < octets_per_if_num; bus_id++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_id);
			data = pup_values[if_id * octets_per_if_num + bus_id] +
			       pup_ph_values[if_id * octets_per_if_num + bus_id];
			CHECK_STATUS(ddr3_tip_bus_write(dev_num,
							ACCESS_TYPE_UNICAST,
							if_id,
							ACCESS_TYPE_UNICAST,
							bus_id,
							DDR_PHY_DATA,
							reg_addr,
							data));
		}
	}

	return 0;
}

#if !defined(EXCLUDE_SWITCH_DEBUG)
struct hws_tip_config_func_db config_func_info[MAX_DEVICE_NUM];
u32 start_xsb_offset = 0;
u8 is_rl_old = 0;
u8 is_freq_old = 0;
u8 is_dfs_disabled = 0;
u32 default_centrlization_value = 0x12;
u32 activate_select_before_run_alg = 1, activate_deselect_after_run_alg = 1,
	rl_test = 0, reset_read_fifo = 0;
int debug_acc = 0;
u32 ctrl_sweepres[ADLL_LENGTH][MAX_INTERFACE_NUM][MAX_BUS_NUM];
u32 ctrl_adll[MAX_CS_NUM * MAX_INTERFACE_NUM * MAX_BUS_NUM];

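/* Data patterns written and read back by run_xsb_test() */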
u32 xsb_test_table[][8] = {
	{0x00000000, 0x11111111, 0x22222222, 0x33333333, 0x44444444, 0x55555555,
	 0x66666666, 0x77777777},
	{0x88888888, 0x99999999, 0xaaaaaaaa, 0xbbbbbbbb, 0xcccccccc, 0xdddddddd,
	 0xeeeeeeee, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0xffffffff, 0x00000000, 0xffffffff, 0x00000000, 0xffffffff,
	 0x00000000, 0xffffffff},
	{0x00000000, 0x00000000, 0xffffffff, 0xffffffff, 0x00000000, 0x00000000,
	 0xffffffff, 0xffffffff},
	{0x00000000, 0x00000000, 0x00000000, 0xffffffff, 0x00000000, 0x00000000,
	 0x00000000, 0x00000000},
	{0xffffffff, 0xffffffff, 0xffffffff, 0xffffffff, 0x00000000, 0xffffffff,
	 0xffffffff, 0xffffffff}
};

int ddr3_tip_print_adll(void)
{
	u32 bus_cnt = 0, if_id, data_p1, data_p2, ui_data3, dev_num = 0;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		for (bus_cnt = 0; bus_cnt < octets_per_if_num;
		     bus_cnt++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, bus_cnt);
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id,
				      ACCESS_TYPE_UNICAST, bus_cnt,
				      DDR_PHY_DATA, 0x1, &data_p1));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x2, &data_p2));
			CHECK_STATUS(ddr3_tip_bus_read
				     (dev_num, if_id, ACCESS_TYPE_UNICAST,
				      bus_cnt, DDR_PHY_DATA, 0x3, &ui_data3));
			DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
					  (" IF %d bus_cnt %d phy_reg_1_data 0x%x phy_reg_2_data 0x%x phy_reg_3_data 0x%x\n",
					   if_id, bus_cnt, data_p1, data_p2,
					   ui_data3));
		}
	}

	return MV_OK;
}

#endif /* EXCLUDE_SWITCH_DEBUG */

#if defined(DDR_VIEWER_TOOL)
/*
 * Print ADLL
 */
int print_adll(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
{
	u32 i, j;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (j = 0; j < octets_per_if_num; j++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++)
			printf("%d ,", adll[i * octets_per_if_num + j]);
	}
	printf("\n");

	return MV_OK;
}

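/* Print the phase field (bits [8:6]) of each leveling value */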
int print_ph(u32 dev_num, u32 adll[MAX_INTERFACE_NUM * MAX_BUS_NUM])
{
	u32 i, j;
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (j = 0; j < octets_per_if_num; j++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, j);
		for (i = 0; i < MAX_INTERFACE_NUM; i++)
			printf("%d ,", adll[i * octets_per_if_num + j] >> 6);
	}
	printf("\n");

	return MV_OK;
}
#endif /* DDR_VIEWER_TOOL */

#if !defined(EXCLUDE_SWITCH_DEBUG)
/* byte_index - only byte 0, 1, 2, or 3; 0xff - test all bytes */
static u32 ddr3_tip_compare(u32 if_id, u32 *p_src, u32 *p_dst,
			    u32 byte_index)
{
	u32 burst_cnt = 0, addr_offset, i_id;
	int b_is_fail = 0;

	addr_offset =
		(byte_index ==
		 0xff) ? (u32) 0xffffffff : (u32) (0xff << (byte_index * 8));
	for (burst_cnt = 0; burst_cnt < EXT_ACCESS_BURST_LENGTH; burst_cnt++) {
		if ((p_src[burst_cnt] & addr_offset) !=
		    (p_dst[if_id] & addr_offset))
			b_is_fail = 1;
	}

	if (b_is_fail == 1) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("IF %d exp: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("0x%8x ", p_src[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("\n_i_f %d rcv: ", if_id));
		for (i_id = 0; i_id <= MAX_INTERFACE_NUM - 1; i_id++) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("(0x%8x ", p_dst[i_id]));
		}
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("\n "));
	}

	return b_is_fail;
}
#endif /* EXCLUDE_SWITCH_DEBUG */

#if defined(DDR_VIEWER_TOOL)
/*
 * Sweep validation
 */
int ddr3_tip_run_sweep_test(int dev_num, u32 repeat_num, u32 direction,
			    u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int if_id = 0;
	u32 adll_value = 0;
	u32 reg;
	enum hws_access_type pup_access;
	u32 cs;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	repeat_num = 2;

	if (mode == 1) {
		/* per pup */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
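		/*
		 * direction 0 sweeps the TX (write) centralization ADLL,
		 * any other value sweeps the RX (read) centralization ADLL.
		 */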
		reg = (direction == 0) ? CTX_PHY_REG(cs) : CRX_PHY_REG(cs);
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE
					(tm->if_act_mask,
					 if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					ctrl_sweepres[adll][if_id][pup] =
						0;
				}
			}
		}

		for (adll = 0; adll < (MAX_INTERFACE_NUM * MAX_BUS_NUM); adll++)
			ctrl_adll[adll] = 0;
		/* Save DQS value (after algorithm run) */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll,
					 reg, MASK_ALL_BITS);

		/*
		 * Sweep ADLL from 0 to 31 on all I/Fs and all pups, and
		 * perform BIST at each stage.
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
					     pattern_idx++) {
						adll_value =
							(direction == 0) ? (adll * 2) : adll;
						CHECK_STATUS(ddr3_tip_bus_write
							     (dev_num, ACCESS_TYPE_MULTICAST, 0,
							      pup_access, pup, DDR_PHY_DATA,
							      reg, adll_value));
						hws_ddr3_run_bist(dev_num, sweep_pattern, res,
								  cs);
						/* ddr3_tip_reset_fifo_ptr(dev_num); */
						for (if_id = 0;
						     if_id < MAX_INTERFACE_NUM;
						     if_id++) {
							VALIDATE_IF_ACTIVE
								(tm->if_act_mask,
								 if_id);
							ctrl_sweepres[adll][if_id][pup]
								+= res[if_id];
							if (mode == 1) {
								CHECK_STATUS
									(ddr3_tip_bus_write
									 (dev_num,
									  ACCESS_TYPE_UNICAST,
									  if_id,
									  ACCESS_TYPE_UNICAST,
									  pup,
									  DDR_PHY_DATA,
									  reg,
									  ctrl_adll[if_id *
										    cs *
										    octets_per_if_num
										    + pup]));
							}
						}
					}
				}
			}
		}
		printf("Final, CS %d,%s, Sweep, Result, Adll,", cs,
		       ((direction == 0) ? "TX" : "RX"));
		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? (adll * 2) : adll;
			printf("Final,%s, Sweep, Result, %d ,",
			       ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0;
			     if_id <= MAX_INTERFACE_NUM - 1;
			     if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++) {
					printf("%8d , ",
					       ctrl_sweepres[adll][if_id]
					       [pup]);
				}
			}
			printf("\n");
		}

		/*
		 * Write back to the phy the Rx DQS value, we store in
		 * the beginning.
		 */
		ddr3_tip_write_adll_value(dev_num, ctrl_adll, reg);
		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s, DQS, ADLL,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}

#if defined(EXCLUDE_SWITCH_DEBUG)
int ddr3_tip_run_leveling_sweep_test(int dev_num, u32 repeat_num,
				     u32 direction, u32 mode)
{
	u32 pup = 0, start_pup = 0, end_pup = 0, start_adll = 0;
	u32 adll = 0, rep = 0, pattern_idx = 0;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 res[MAX_INTERFACE_NUM] = { 0 };
	int if_id = 0, gap = 0;
	u32 adll_value = 0;
	u32 reg;
	enum hws_access_type pup_access;
	u32 cs;
	unsigned int max_cs = mv_ddr_cs_num_get();
	u32 octets_per_if_num = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	if (mode == 1) { /* per pup */
		start_pup = 0;
		end_pup = octets_per_if_num - 1;
		pup_access = ACCESS_TYPE_UNICAST;
	} else {
		start_pup = 0;
		end_pup = 0;
		pup_access = ACCESS_TYPE_MULTICAST;
	}

	for (cs = 0; cs < max_cs; cs++) {
		reg = (direction == 0) ? WL_PHY_REG(cs) : RL_PHY_REG(cs);
		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					ctrl_sweepres[adll][if_id][pup] = 0;
			}
		}

		for (adll = 0; adll < MAX_INTERFACE_NUM * MAX_BUS_NUM; adll++) {
			ctrl_adll[adll] = 0;
			ctrl_level_phase[adll] = 0;
			ctrl_adll1[adll] = 0;
		}

		/* save leveling value after running algorithm */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, 0x1f);
		read_phase_value(dev_num, ctrl_level_phase, reg, 0x7 << 6);

		if (direction == 0)
			ddr3_tip_read_adll_value(dev_num, ctrl_adll1,
						 CTX_PHY_REG(cs), MASK_ALL_BITS);

		/* Sweep ADLL from 0 to 31 on all interfaces, all pups,
		 * and perform BIST on each stage
		 */
		for (pup = start_pup; pup <= end_pup; pup++) {
			for (adll = 0; adll < ADLL_LENGTH; adll++) {
				for (rep = 0; rep < repeat_num; rep++) {
					adll_value = (direction == 0) ? (adll * 2) : (adll * 3);
					for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
						start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
							     (ctrl_level_phase[if_id * cs *
									       octets_per_if_num +
									       pup] >> 6) * 32;

						if (direction == 0)
							start_adll = (start_adll > 32) ? (start_adll - 32) : 0;
						else
							start_adll = (start_adll > 48) ? (start_adll - 48) : 0;

						adll_value += start_adll;

						gap = ctrl_adll1[if_id * cs * octets_per_if_num + pup] -
						      ctrl_adll[if_id * cs * octets_per_if_num + pup];
						gap = (((adll_value % 32) + gap) % 64);

						adll_value = ((adll_value % 32) +
							      (((adll_value - (adll_value % 32)) / 32) << 6));

						CHECK_STATUS(ddr3_tip_bus_write(dev_num,
										ACCESS_TYPE_UNICAST,
										if_id,
										pup_access,
										pup,
										DDR_PHY_DATA,
										reg,
										adll_value));
						if (direction == 0)
							CHECK_STATUS(ddr3_tip_bus_write(dev_num,
											ACCESS_TYPE_UNICAST,
											if_id,
											pup_access,
											pup,
											DDR_PHY_DATA,
											CTX_PHY_REG(cs),
											gap));
					}

					for (pattern_idx = PATTERN_KILLER_DQ0;
					     pattern_idx < PATTERN_LAST;
					     pattern_idx++) {
						hws_ddr3_run_bist(dev_num, sweep_pattern, res, cs);
						ddr3_tip_reset_fifo_ptr(dev_num);
						for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
							VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
							if (pup != 4) {	/* TODO: remove literal */
								ctrl_sweepres[adll][if_id][pup] += res[if_id];
							} else {
								CHECK_STATUS(ddr3_tip_if_read(dev_num,
											      ACCESS_TYPE_UNICAST,
											      if_id,
											      0x1458,
											      read_data,
											      MASK_ALL_BITS));
								ctrl_sweepres[adll][if_id][pup] += read_data[if_id];
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
											       if_id,
											       0x1458,
											       0x0,
											       0xFFFFFFFF));
								CHECK_STATUS(ddr3_tip_if_write(dev_num,
											       ACCESS_TYPE_UNICAST,
											       if_id,
											       0x145C,
											       0x0,
											       0xFFFFFFFF));
							}
						}
					}
				}
			}

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				start_adll = ctrl_adll[if_id * cs * octets_per_if_num + pup] +
					     ctrl_level_phase[if_id * cs * octets_per_if_num + pup];
				CHECK_STATUS(ddr3_tip_bus_write(dev_num, ACCESS_TYPE_UNICAST, if_id, pup_access, pup,
								DDR_PHY_DATA, reg, start_adll));
				if (direction == 0)
					CHECK_STATUS(ddr3_tip_bus_write(dev_num,
									ACCESS_TYPE_UNICAST,
									if_id,
									pup_access,
									pup,
									DDR_PHY_DATA,
									CTX_PHY_REG(cs),
									ctrl_adll1[if_id *
										   cs *
										   octets_per_if_num +
										   pup]));
			}
		}

		printf("Final,CS %d,%s,Leveling,Result,Adll,", cs, ((direction == 0) ? "TX" : "RX"));

		for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
			VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
			if (mode == 1) {
				for (pup = start_pup; pup <= end_pup; pup++) {
					VALIDATE_BUS_ACTIVE(tm->bus_act_mask, pup);
					printf("I/F%d-PHY%d , ", if_id, pup);
				}
			} else {
				printf("I/F%d , ", if_id);
			}
		}
		printf("\n");

		for (adll = 0; adll < ADLL_LENGTH; adll++) {
			adll_value = (direction == 0) ? ((adll * 2) - 32) : ((adll * 3) - 48);
			printf("Final,%s,LevelingSweep,Result, %d ,", ((direction == 0) ? "TX" : "RX"), adll_value);

			for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
				VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
				for (pup = start_pup; pup <= end_pup; pup++)
					printf("%8d , ", ctrl_sweepres[adll][if_id][pup]);
			}
			printf("\n");
		}

		/* write back to the phy the Rx DQS value, we store in the beginning */
		write_leveling_value(dev_num, ctrl_adll, ctrl_level_phase, reg);
		if (direction == 0)
			ddr3_tip_write_adll_value(dev_num, ctrl_adll1, CTX_PHY_REG(cs));

		/* print adll results */
		ddr3_tip_read_adll_value(dev_num, ctrl_adll, reg, MASK_ALL_BITS);
		printf("%s,DQS,Leveling,,,", (direction == 0) ? "Tx" : "Rx");
		print_adll(dev_num, ctrl_adll);
		print_ph(dev_num, ctrl_level_phase);
	}
	ddr3_tip_reset_fifo_ptr(dev_num);

	return 0;
}
#endif /* EXCLUDE_SWITCH_DEBUG */

void print_topology(struct mv_ddr_topology_map *topology_db)
{
	u32 ui, uj;
	u32 dev_num = 0;

	printf("\tinterface_mask: 0x%x\n", topology_db->if_act_mask);
	printf("\tNumber of buses: 0x%x\n",
	       ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE));
	printf("\tbus_act_mask: 0x%x\n", topology_db->bus_act_mask);

	for (ui = 0; ui < MAX_INTERFACE_NUM; ui++) {
		VALIDATE_IF_ACTIVE(topology_db->if_act_mask, ui);
		printf("\n\tInterface ID: %d\n", ui);
		printf("\t\tDDR Frequency: %s\n",
		       convert_freq(topology_db->
				    interface_params[ui].memory_freq));
		printf("\t\tSpeed_bin: %d\n",
		       topology_db->interface_params[ui].speed_bin_index);
		printf("\t\tBus_width: %d\n",
		       (4 << topology_db->interface_params[ui].bus_width));
		printf("\t\tMem_size: %s\n",
		       convert_mem_size(topology_db->
					interface_params[ui].memory_size));
		printf("\t\tCAS-WL: %d\n",
		       topology_db->interface_params[ui].cas_wl);
		printf("\t\tCAS-L: %d\n",
		       topology_db->interface_params[ui].cas_l);
		printf("\t\tTemperature: %d\n",
		       topology_db->interface_params[ui].interface_temp);
		printf("\n");
		for (uj = 0; uj < 4; uj++) {
			printf("\t\tBus %d parameters- CS Mask: 0x%x\t", uj,
			       topology_db->interface_params[ui].
			       as_bus_params[uj].cs_bitmask);
			printf("Mirror: 0x%x\t",
			       topology_db->interface_params[ui].
			       as_bus_params[uj].mirror_enable_bitmask);
			printf("DQS Swap is %s \t",
			       (topology_db->
				interface_params[ui].as_bus_params[uj].
				is_dqs_swap == 1) ? "enabled" : "disabled");
			printf("Ck Swap:%s\t",
			       (topology_db->
				interface_params[ui].as_bus_params[uj].
				is_ck_swap == 1) ? "enabled" : "disabled");
			printf("\n");
		}
	}
}
#endif /* DDR_VIEWER_TOOL */

#if !defined(EXCLUDE_SWITCH_DEBUG)
/*
 * Execute XSB Test transaction (rd/wr/both)
 */
int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	u32 seq = 0, if_id = 0, addr, cnt;
	int ret = MV_OK, ret_tmp;
	u32 data_read[MAX_INTERFACE_NUM];
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();

	for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		addr = mem_addr;
		for (cnt = 0; cnt <= burst_length; cnt++) {
			seq = (seq + 1) % 8;
			if (write_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_write
					     (dev_num, if_id, addr, 1,
					      xsb_test_table[seq]));
			}
			if (read_type != 0) {
				CHECK_STATUS(ddr3_tip_ext_read
					     (dev_num, if_id, addr, 1,
					      data_read));
			}
			if ((read_type != 0) && (write_type != 0)) {
				ret_tmp =
					ddr3_tip_compare(if_id,
							 xsb_test_table[seq],
							 data_read,
							 0xff);
				addr += (EXT_ACCESS_BURST_LENGTH * 4);
				ret = (ret != MV_OK) ? ret : ret_tmp;
			}
		}
	}

	return ret;
}

#else /* EXCLUDE_SWITCH_DEBUG */
u32 start_xsb_offset = 0;

int run_xsb_test(u32 dev_num, u32 mem_addr, u32 write_type,
		 u32 read_type, u32 burst_length)
{
	return MV_OK;
}

#endif /* EXCLUDE_SWITCH_DEBUG */