 1/*
2 * Copyright (C) Marvell International Ltd. and its affiliates
3 *
4 * SPDX-License-Identifier: GPL-2.0
5 */
6
7#include <common.h>
8#include <spl.h>
9#include <asm/io.h>
10#include <asm/arch/cpu.h>
11#include <asm/arch/soc.h>
12
13#include "ddr3_init.h"
14
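/*
 * Helper macros (description added here for clarity): GET_MAX_VALUE() yields
 * the larger of its two arguments; CEIL_DIVIDE(x, y) evaluates to
 * ceil(x/y) - 1, i.e. the number of whole y-periods needed to cover x, minus
 * one. TIME_2_CLOCK_CYCLES below is an alias for CEIL_DIVIDE and is used to
 * convert picosecond times into clock-cycle counts for the timing fields.
 */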
15#define GET_MAX_VALUE(x, y) \
16 ((x) > (y)) ? (x) : (y)
17#define CEIL_DIVIDE(x, y) \
18 ((x - (x / y) * y) == 0) ? ((x / y) - 1) : (x / y)
19
20#define TIME_2_CLOCK_CYCLES CEIL_DIVIDE
21
22#define GET_CS_FROM_MASK(mask) (cs_mask2_num[mask])
23#define CS_CBE_VALUE(cs_num) (cs_cbe_reg[cs_num])
24
 25#define TIMES_9_TREFI_CYCLES 0x8
26
 27u32 window_mem_addr = 0;
28u32 phy_reg0_val = 0;
29u32 phy_reg1_val = 8;
30u32 phy_reg2_val = 0;
31u32 phy_reg3_val = 0xa;
32enum hws_ddr_freq init_freq = DDR_FREQ_667;
33enum hws_ddr_freq low_freq = DDR_FREQ_LOW_FREQ;
34enum hws_ddr_freq medium_freq;
35u32 debug_dunit = 0;
36u32 odt_additional = 1;
37u32 *dq_map_table = NULL;
38u32 odt_config = 1;
39
40#if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ALLEYCAT3) || \
41 defined(CONFIG_ARMADA_39X)
42u32 is_pll_before_init = 0, is_adll_calib_before_init = 0, is_dfs_in_init = 0;
43u32 dfs_low_freq = 130;
44#else
45u32 is_pll_before_init = 0, is_adll_calib_before_init = 1, is_dfs_in_init = 0;
46u32 dfs_low_freq = 100;
47#endif
48u32 g_rtt_nom_c_s0, g_rtt_nom_c_s1;
 49u8 calibration_update_control; /* 2 - external only, 1 - internal only */
50
51enum hws_result training_result[MAX_STAGE_LIMIT][MAX_INTERFACE_NUM];
52enum auto_tune_stage training_stage = INIT_CONTROLLER;
53u32 finger_test = 0, p_finger_start = 11, p_finger_end = 64,
54 n_finger_start = 11, n_finger_end = 64,
55 p_finger_step = 3, n_finger_step = 3;
56u32 clamp_tbl[] = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 };
57
 58/* Initialized to 0xff; this variable is defined by the user in debug mode */
59u32 mode2_t = 0xff;
60u32 xsb_validate_type = 0;
61u32 xsb_validation_base_address = 0xf000;
62u32 first_active_if = 0;
63u32 dfs_low_phy1 = 0x1f;
64u32 multicast_id = 0;
65int use_broadcast = 0;
66struct hws_tip_freq_config_info *freq_info_table = NULL;
67u8 is_cbe_required = 0;
68u32 debug_mode = 0;
69u32 delay_enable = 0;
70int rl_mid_freq_wa = 0;
71
72u32 effective_cs = 0;
73
74u32 mask_tune_func = (SET_MEDIUM_FREQ_MASK_BIT |
75 WRITE_LEVELING_MASK_BIT |
76 LOAD_PATTERN_2_MASK_BIT |
77 READ_LEVELING_MASK_BIT |
78 SET_TARGET_FREQ_MASK_BIT | WRITE_LEVELING_TF_MASK_BIT |
79 READ_LEVELING_TF_MASK_BIT |
80 CENTRALIZATION_RX_MASK_BIT | CENTRALIZATION_TX_MASK_BIT);
81
82void ddr3_print_version(void)
83{
84 printf(DDR3_TIP_VERSION_STRING);
85}
86
87static int ddr3_tip_ddr3_training_main_flow(u32 dev_num);
88static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
89 u32 if_id, u32 cl_value, u32 cwl_value);
90static int ddr3_tip_ddr3_auto_tune(u32 dev_num);
91static int is_bus_access_done(u32 dev_num, u32 if_id,
92 u32 dunit_reg_adrr, u32 bit);
93#ifdef ODT_TEST_SUPPORT
94static int odt_test(u32 dev_num, enum hws_algo_type algo_type);
95#endif
96
97int adll_calibration(u32 dev_num, enum hws_access_type access_type,
98 u32 if_id, enum hws_ddr_freq frequency);
99static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
100 u32 if_id, enum hws_ddr_freq frequency);
101
102static struct page_element page_param[] = {
103 /*
104 * 8bits 16 bits
105 * page-size(K) page-size(K) mask
106 */
107 { 1, 2, 2},
108 /* 512M */
109 { 1, 2, 3},
110 /* 1G */
111 { 1, 2, 0},
112 /* 2G */
113 { 1, 2, 4},
114 /* 4G */
115 { 2, 2, 5}
116 /* 8G */
117};
118
119static u8 mem_size_config[MEM_SIZE_LAST] = {
120 0x2, /* 512Mbit */
121 0x3, /* 1Gbit */
122 0x0, /* 2Gbit */
123 0x4, /* 4Gbit */
124 0x5 /* 8Gbit */
125};
126
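/* cs_mask2_num[] maps a CS bitmask to the index of its highest set bit (the CS number) */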
127static u8 cs_mask2_num[] = { 0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3 };
128
129static struct reg_data odpg_default_value[] = {
130 {0x1034, 0x38000, MASK_ALL_BITS},
131 {0x1038, 0x0, MASK_ALL_BITS},
132 {0x10b0, 0x0, MASK_ALL_BITS},
133 {0x10b8, 0x0, MASK_ALL_BITS},
134 {0x10c0, 0x0, MASK_ALL_BITS},
135 {0x10f0, 0x0, MASK_ALL_BITS},
136 {0x10f4, 0x0, MASK_ALL_BITS},
137 {0x10f8, 0xff, MASK_ALL_BITS},
138 {0x10fc, 0xffff, MASK_ALL_BITS},
139 {0x1130, 0x0, MASK_ALL_BITS},
140 {0x1830, 0x2000000, MASK_ALL_BITS},
141 {0x14d0, 0x0, MASK_ALL_BITS},
142 {0x14d4, 0x0, MASK_ALL_BITS},
143 {0x14d8, 0x0, MASK_ALL_BITS},
144 {0x14dc, 0x0, MASK_ALL_BITS},
145 {0x1454, 0x0, MASK_ALL_BITS},
146 {0x1594, 0x0, MASK_ALL_BITS},
147 {0x1598, 0x0, MASK_ALL_BITS},
148 {0x159c, 0x0, MASK_ALL_BITS},
149 {0x15a0, 0x0, MASK_ALL_BITS},
150 {0x15a4, 0x0, MASK_ALL_BITS},
151 {0x15a8, 0x0, MASK_ALL_BITS},
152 {0x15ac, 0x0, MASK_ALL_BITS},
153 {0x1604, 0x0, MASK_ALL_BITS},
154 {0x1608, 0x0, MASK_ALL_BITS},
155 {0x160c, 0x0, MASK_ALL_BITS},
156 {0x1610, 0x0, MASK_ALL_BITS},
157 {0x1614, 0x0, MASK_ALL_BITS},
158 {0x1618, 0x0, MASK_ALL_BITS},
159 {0x1624, 0x0, MASK_ALL_BITS},
160 {0x1690, 0x0, MASK_ALL_BITS},
161 {0x1694, 0x0, MASK_ALL_BITS},
162 {0x1698, 0x0, MASK_ALL_BITS},
163 {0x169c, 0x0, MASK_ALL_BITS},
164 {0x14b8, 0x6f67, MASK_ALL_BITS},
165 {0x1630, 0x0, MASK_ALL_BITS},
166 {0x1634, 0x0, MASK_ALL_BITS},
167 {0x1638, 0x0, MASK_ALL_BITS},
168 {0x163c, 0x0, MASK_ALL_BITS},
169 {0x16b0, 0x0, MASK_ALL_BITS},
170 {0x16b4, 0x0, MASK_ALL_BITS},
171 {0x16b8, 0x0, MASK_ALL_BITS},
172 {0x16bc, 0x0, MASK_ALL_BITS},
173 {0x16c0, 0x0, MASK_ALL_BITS},
174 {0x16c4, 0x0, MASK_ALL_BITS},
175 {0x16c8, 0x0, MASK_ALL_BITS},
176 {0x16cc, 0x1, MASK_ALL_BITS},
177 {0x16f0, 0x1, MASK_ALL_BITS},
178 {0x16f4, 0x0, MASK_ALL_BITS},
179 {0x16f8, 0x0, MASK_ALL_BITS},
180 {0x16fc, 0x0, MASK_ALL_BITS}
181};
182
183static int ddr3_tip_bus_access(u32 dev_num, enum hws_access_type interface_access,
184 u32 if_id, enum hws_access_type phy_access,
185 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
186 u32 data_value, enum hws_operation oper_type);
187static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id);
188static int ddr3_tip_rank_control(u32 dev_num, u32 if_id);
189
190/*
191 * Update global training parameters by data from user
192 */
193int ddr3_tip_tune_training_params(u32 dev_num,
194 struct tune_train_params *params)
195{
196 if (params->ck_delay != -1)
197 ck_delay = params->ck_delay;
198 if (params->ck_delay_16 != -1)
199 ck_delay_16 = params->ck_delay_16;
200 if (params->phy_reg3_val != -1)
201 phy_reg3_val = params->phy_reg3_val;
202
203 return MV_OK;
204}
205
206/*
207 * Configure CS
208 */
209int ddr3_tip_configure_cs(u32 dev_num, u32 if_id, u32 cs_num, u32 enable)
210{
211 u32 data, addr_hi, data_high;
212 u32 mem_index;
213 struct hws_topology_map *tm = ddr3_get_topology_map();
214
215 if (enable == 1) {
216 data = (tm->interface_params[if_id].bus_width ==
217 BUS_WIDTH_8) ? 0 : 1;
218 CHECK_STATUS(ddr3_tip_if_write
219 (dev_num, ACCESS_TYPE_UNICAST, if_id,
220 SDRAM_ACCESS_CONTROL_REG, (data << (cs_num * 4)),
221 0x3 << (cs_num * 4)));
222 mem_index = tm->interface_params[if_id].memory_size;
223
224 addr_hi = mem_size_config[mem_index] & 0x3;
225 CHECK_STATUS(ddr3_tip_if_write
226 (dev_num, ACCESS_TYPE_UNICAST, if_id,
227 SDRAM_ACCESS_CONTROL_REG,
228 (addr_hi << (2 + cs_num * 4)),
229 0x3 << (2 + cs_num * 4)));
230
231 data_high = (mem_size_config[mem_index] & 0x4) >> 2;
232 CHECK_STATUS(ddr3_tip_if_write
233 (dev_num, ACCESS_TYPE_UNICAST, if_id,
234 SDRAM_ACCESS_CONTROL_REG,
235 data_high << (20 + cs_num), 1 << (20 + cs_num)));
236
237 /* Enable Address Select Mode */
238 CHECK_STATUS(ddr3_tip_if_write
239 (dev_num, ACCESS_TYPE_UNICAST, if_id,
240 SDRAM_ACCESS_CONTROL_REG, 1 << (16 + cs_num),
241 1 << (16 + cs_num)));
242 }
243 switch (cs_num) {
244 case 0:
245 case 1:
246 case 2:
247 CHECK_STATUS(ddr3_tip_if_write
248 (dev_num, ACCESS_TYPE_UNICAST, if_id,
249 DDR_CONTROL_LOW_REG, (enable << (cs_num + 11)),
250 1 << (cs_num + 11)));
251 break;
252 case 3:
253 CHECK_STATUS(ddr3_tip_if_write
254 (dev_num, ACCESS_TYPE_UNICAST, if_id,
255 DDR_CONTROL_LOW_REG, (enable << 15), 1 << 15));
256 break;
257 }
258
259 return MV_OK;
260}
261
262/*
263 * Calculate number of CS
264 */
265static int calc_cs_num(u32 dev_num, u32 if_id, u32 *cs_num)
266{
267 u32 cs;
268 u32 bus_cnt;
269 u32 cs_count;
270 u32 cs_bitmask;
271 u32 curr_cs_num = 0;
272 struct hws_topology_map *tm = ddr3_get_topology_map();
273
274 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
275 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
276 cs_count = 0;
277 cs_bitmask = tm->interface_params[if_id].
278 as_bus_params[bus_cnt].cs_bitmask;
279 for (cs = 0; cs < MAX_CS_NUM; cs++) {
280 if ((cs_bitmask >> cs) & 1)
281 cs_count++;
282 }
283
284 if (curr_cs_num == 0) {
285 curr_cs_num = cs_count;
286 } else if (cs_count != curr_cs_num) {
287 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
288 ("CS number is different per bus (IF %d BUS %d cs_num %d curr_cs_num %d)\n",
289 if_id, bus_cnt, cs_count,
290 curr_cs_num));
291 return MV_NOT_SUPPORTED;
292 }
293 }
294 *cs_num = curr_cs_num;
295
296 return MV_OK;
297}
298
299/*
300 * Init Controller Flow
301 */
302int hws_ddr3_tip_init_controller(u32 dev_num, struct init_cntr_param *init_cntr_prm)
303{
304 u32 if_id;
305 u32 cs_num;
306 u32 t_refi = 0, t_hclk = 0, t_ckclk = 0, t_faw = 0, t_pd = 0,
307 t_wr = 0, t2t = 0, txpdll = 0;
308 u32 data_value = 0, bus_width = 0, page_size = 0, cs_cnt = 0,
309 mem_mask = 0, bus_index = 0;
310 enum hws_speed_bin speed_bin_index = SPEED_BIN_DDR_2133N;
311 enum hws_mem_size memory_size = MEM_2G;
312 enum hws_ddr_freq freq = init_freq;
 313 enum hws_timing timing;
 314 u32 cs_mask = 0;
315 u32 cl_value = 0, cwl_val = 0;
316 u32 refresh_interval_cnt = 0, bus_cnt = 0, adll_tap = 0;
317 enum hws_access_type access_type = ACCESS_TYPE_UNICAST;
318 u32 data_read[MAX_INTERFACE_NUM];
319 struct hws_topology_map *tm = ddr3_get_topology_map();
 320 u32 odt_config = g_odt_config_2cs;
 321
322 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
323 ("Init_controller, do_mrs_phy=%d, is_ctrl64_bit=%d\n",
324 init_cntr_prm->do_mrs_phy,
325 init_cntr_prm->is_ctrl64_bit));
326
327 if (init_cntr_prm->init_phy == 1) {
328 CHECK_STATUS(ddr3_tip_configure_phy(dev_num));
329 }
330
331 if (generic_init_controller == 1) {
332 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
333 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
334 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
335 ("active IF %d\n", if_id));
336 mem_mask = 0;
337 for (bus_index = 0;
338 bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
339 bus_index++) {
340 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
341 mem_mask |=
342 tm->interface_params[if_id].
343 as_bus_params[bus_index].mirror_enable_bitmask;
344 }
345
346 if (mem_mask != 0) {
347 CHECK_STATUS(ddr3_tip_if_write
348 (dev_num, ACCESS_TYPE_MULTICAST,
349 if_id, CS_ENABLE_REG, 0,
350 0x8));
351 }
352
353 memory_size =
354 tm->interface_params[if_id].
355 memory_size;
356 speed_bin_index =
357 tm->interface_params[if_id].
358 speed_bin_index;
359 freq = init_freq;
360 t_refi =
361 (tm->interface_params[if_id].
362 interface_temp ==
363 HWS_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW;
364 t_refi *= 1000; /* psec */
365 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
 366 ("mem_size %d speed_bin_ind %d freq %d t_refi %d\n",
367 memory_size, speed_bin_index, freq,
368 t_refi));
369 /* HCLK & CK CLK in 2:1[ps] */
370 /* t_ckclk is external clock */
371 t_ckclk = (MEGA / freq_val[freq]);
372 /* t_hclk is internal clock */
373 t_hclk = 2 * t_ckclk;
374 refresh_interval_cnt = t_refi / t_hclk; /* no units */
375 bus_width =
376 (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask)
377 == 1) ? (16) : (32);
378
379 if (init_cntr_prm->is_ctrl64_bit)
380 bus_width = 64;
381
382 data_value =
383 (refresh_interval_cnt | 0x4000 |
384 ((bus_width ==
385 32) ? 0x8000 : 0) | 0x1000000) & ~(1 << 26);
386
387 /* Interface Bus Width */
388 /* SRMode */
389 CHECK_STATUS(ddr3_tip_if_write
390 (dev_num, access_type, if_id,
391 SDRAM_CONFIGURATION_REG, data_value,
392 0x100ffff));
393
394 /* Interleave first command pre-charge enable (TBD) */
395 CHECK_STATUS(ddr3_tip_if_write
396 (dev_num, access_type, if_id,
397 SDRAM_OPEN_PAGE_CONTROL_REG, (1 << 10),
398 (1 << 10)));
399
400 /* PHY configuration */
401 /*
402 * Postamble Length = 1.5cc, Addresscntl to clk skew
 403 * 1/2, Preamble length normal, parallel ADLL enable
404 */
405 CHECK_STATUS(ddr3_tip_if_write
406 (dev_num, access_type, if_id,
407 DRAM_PHY_CONFIGURATION, 0x28, 0x3e));
408 if (init_cntr_prm->is_ctrl64_bit) {
409 /* positive edge */
410 CHECK_STATUS(ddr3_tip_if_write
411 (dev_num, access_type, if_id,
412 DRAM_PHY_CONFIGURATION, 0x0,
413 0xff80));
414 }
415
416 /* calibration block disable */
417 /* Xbar Read buffer select (for Internal access) */
418 CHECK_STATUS(ddr3_tip_if_write
419 (dev_num, access_type, if_id,
420 CALIB_MACHINE_CTRL_REG, 0x1200c,
421 0x7dffe01c));
422 CHECK_STATUS(ddr3_tip_if_write
423 (dev_num, access_type, if_id,
424 CALIB_MACHINE_CTRL_REG,
425 calibration_update_control << 3, 0x3 << 3));
426
427 /* Pad calibration control - enable */
428 CHECK_STATUS(ddr3_tip_if_write
429 (dev_num, access_type, if_id,
430 CALIB_MACHINE_CTRL_REG, 0x1, 0x1));
431
432 cs_mask = 0;
433 data_value = 0x7;
434 /*
 435 * Address ctrl - Part of the Generic code
436 * The next configuration is done:
437 * 1) Memory Size
438 * 2) Bus_width
439 * 3) CS#
440 * 4) Page Number
441 * 5) t_faw
442 * Per Dunit get from the Map_topology the parameters:
443 * Bus_width
444 * t_faw is per Dunit not per CS
445 */
446 page_size =
447 (tm->interface_params[if_id].
448 bus_width ==
449 BUS_WIDTH_8) ? page_param[memory_size].
450 page_size_8bit : page_param[memory_size].
451 page_size_16bit;
452
453 t_faw =
454 (page_size == 1) ? speed_bin_table(speed_bin_index,
455 SPEED_BIN_TFAW1K)
456 : speed_bin_table(speed_bin_index,
457 SPEED_BIN_TFAW2K);
458
459 data_value = TIME_2_CLOCK_CYCLES(t_faw, t_ckclk);
460 data_value = data_value << 24;
461 CHECK_STATUS(ddr3_tip_if_write
462 (dev_num, access_type, if_id,
463 SDRAM_ACCESS_CONTROL_REG, data_value,
464 0x7f000000));
465
466 data_value =
467 (tm->interface_params[if_id].
468 bus_width == BUS_WIDTH_8) ? 0 : 1;
469
470 /* create merge cs mask for all cs available in dunit */
471 for (bus_cnt = 0;
472 bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
473 bus_cnt++) {
474 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
475 cs_mask |=
476 tm->interface_params[if_id].
477 as_bus_params[bus_cnt].cs_bitmask;
478 }
479 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
480 ("Init_controller IF %d cs_mask %d\n",
481 if_id, cs_mask));
482 /*
 483 * Configure the next upon the Map Topology - If the
 484 * Dunit is CS0 Configure CS0, if it is multi CS
 485 * configure them both: the bus_width is the
 486 * Memory Bus width - x8 or x16
487 */
488 for (cs_cnt = 0; cs_cnt < NUM_OF_CS; cs_cnt++) {
489 ddr3_tip_configure_cs(dev_num, if_id, cs_cnt,
490 ((cs_mask & (1 << cs_cnt)) ? 1
491 : 0));
492 }
493
494 if (init_cntr_prm->do_mrs_phy) {
495 /*
 496 * MR0 - Part of the Generic code
497 * The next configuration is done:
498 * 1) Burst Length
499 * 2) CAS Latency
500 * get for each dunit what is it Speed_bin &
501 * Target Frequency. From those both parameters
502 * get the appropriate Cas_l from the CL table
503 */
504 cl_value =
505 tm->interface_params[if_id].
506 cas_l;
507 cwl_val =
508 tm->interface_params[if_id].
509 cas_wl;
510 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
511 ("cl_value 0x%x cwl_val 0x%x\n",
512 cl_value, cwl_val));
 513 t_wr = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
514 SPEED_BIN_TWR),
515 t_ckclk);
 516 data_value =
517 ((cl_mask_table[cl_value] & 0x1) << 2) |
518 ((cl_mask_table[cl_value] & 0xe) << 3);
519 CHECK_STATUS(ddr3_tip_if_write
520 (dev_num, access_type, if_id,
521 MR0_REG, data_value,
522 (0x7 << 4) | (1 << 2)));
523 CHECK_STATUS(ddr3_tip_if_write
524 (dev_num, access_type, if_id,
 525 MR0_REG, twr_mask_table[t_wr + 1] << 9,
526 (0x7 << 9)));
527
 528
529 /*
530 * MR1: Set RTT and DIC Design GL values
531 * configured by user
532 */
533 CHECK_STATUS(ddr3_tip_if_write
534 (dev_num, ACCESS_TYPE_MULTICAST,
535 PARAM_NOT_CARE, MR1_REG,
536 g_dic | g_rtt_nom, 0x266));
537
538 /* MR2 - Part of the Generic code */
539 /*
540 * The next configuration is done:
541 * 1) SRT
542 * 2) CAS Write Latency
543 */
544 data_value = (cwl_mask_table[cwl_val] << 3);
545 data_value |=
546 ((tm->interface_params[if_id].
547 interface_temp ==
548 HWS_TEMP_HIGH) ? (1 << 7) : 0);
549 CHECK_STATUS(ddr3_tip_if_write
550 (dev_num, access_type, if_id,
551 MR2_REG, data_value,
552 (0x7 << 3) | (0x1 << 7) | (0x3 <<
553 9)));
554 }
555
556 ddr3_tip_write_odt(dev_num, access_type, if_id,
557 cl_value, cwl_val);
558 ddr3_tip_set_timing(dev_num, access_type, if_id, freq);
559
560 CHECK_STATUS(ddr3_tip_if_write
561 (dev_num, access_type, if_id,
562 DUNIT_CONTROL_HIGH_REG, 0x177,
563 0x1000177));
564
565 if (init_cntr_prm->is_ctrl64_bit) {
566 /* disable 0.25 cc delay */
567 CHECK_STATUS(ddr3_tip_if_write
568 (dev_num, access_type, if_id,
569 DUNIT_CONTROL_HIGH_REG, 0x0,
570 0x800));
571 }
572
573 /* reset bit 7 */
574 CHECK_STATUS(ddr3_tip_if_write
575 (dev_num, access_type, if_id,
576 DUNIT_CONTROL_HIGH_REG,
577 (init_cntr_prm->msys_init << 7), (1 << 7)));
578
 579 /* calculate number of CS (per interface) */
580 CHECK_STATUS(calc_cs_num
581 (dev_num, if_id, &cs_num));
 582 timing = tm->interface_params[if_id].timing;
583
 584 if (mode2_t != 0xff) {
585 t2t = mode2_t;
 586 } else if (timing != HWS_TIM_DEFAULT) {
587 /* Board topology map is forcing timing */
588 t2t = (timing == HWS_TIM_2T) ? 1 : 0;
 589 } else {
 590 t2t = (cs_num == 1) ? 0 : 1;
591 }
592
593 CHECK_STATUS(ddr3_tip_if_write
594 (dev_num, access_type, if_id,
595 DDR_CONTROL_LOW_REG, t2t << 3,
596 0x3 << 3));
597 /* move the block to ddr3_tip_set_timing - start */
 598 t_pd = TIMES_9_TREFI_CYCLES;
599 txpdll = GET_MAX_VALUE(t_ckclk * 10,
600 speed_bin_table(speed_bin_index,
601 SPEED_BIN_TXPDLL));
 602 txpdll = CEIL_DIVIDE((txpdll - 1), t_ckclk);
603 CHECK_STATUS(ddr3_tip_if_write
604 (dev_num, access_type, if_id,
 605 DDR_TIMING_REG, txpdll << 4 | t_pd,
606 0x1f << 4 | 0xf));
 607 CHECK_STATUS(ddr3_tip_if_write
608 (dev_num, access_type, if_id,
609 DDR_TIMING_REG, 0x28 << 9, 0x3f << 9));
610 CHECK_STATUS(ddr3_tip_if_write
611 (dev_num, access_type, if_id,
612 DDR_TIMING_REG, 0xa << 21, 0xff << 21));
613
614 /* move the block to ddr3_tip_set_timing - end */
615 /* AUTO_ZQC_TIMING */
616 CHECK_STATUS(ddr3_tip_if_write
617 (dev_num, access_type, if_id,
618 TIMING_REG, (AUTO_ZQC_TIMING | (2 << 20)),
619 0x3fffff));
620 CHECK_STATUS(ddr3_tip_if_read
621 (dev_num, access_type, if_id,
622 DRAM_PHY_CONFIGURATION, data_read, 0x30));
623 data_value =
624 (data_read[if_id] == 0) ? (1 << 11) : 0;
625 CHECK_STATUS(ddr3_tip_if_write
626 (dev_num, access_type, if_id,
627 DUNIT_CONTROL_HIGH_REG, data_value,
628 (1 << 11)));
629
630 /* Set Active control for ODT write transactions */
 631 if (cs_num == 1)
632 odt_config = g_odt_config_1cs;
 633 CHECK_STATUS(ddr3_tip_if_write
634 (dev_num, ACCESS_TYPE_MULTICAST,
 635 PARAM_NOT_CARE, 0x1494, odt_config,
 636 MASK_ALL_BITS));
637 }
638 } else {
639#ifdef STATIC_ALGO_SUPPORT
640 CHECK_STATUS(ddr3_tip_static_init_controller(dev_num));
641#if defined(CONFIG_ARMADA_38X) || defined(CONFIG_ARMADA_39X)
642 CHECK_STATUS(ddr3_tip_static_phy_init_controller(dev_num));
643#endif
644#endif /* STATIC_ALGO_SUPPORT */
645 }
646
647 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
648 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
649 CHECK_STATUS(ddr3_tip_rank_control(dev_num, if_id));
650
651 if (init_cntr_prm->do_mrs_phy) {
652 CHECK_STATUS(ddr3_tip_pad_inv(dev_num, if_id));
653 }
654
655 /* Pad calibration control - disable */
656 CHECK_STATUS(ddr3_tip_if_write
657 (dev_num, access_type, if_id,
658 CALIB_MACHINE_CTRL_REG, 0x0, 0x1));
659 CHECK_STATUS(ddr3_tip_if_write
660 (dev_num, access_type, if_id,
661 CALIB_MACHINE_CTRL_REG,
662 calibration_update_control << 3, 0x3 << 3));
663 }
664
665 CHECK_STATUS(ddr3_tip_enable_init_sequence(dev_num));
666
667 if (delay_enable != 0) {
668 adll_tap = MEGA / (freq_val[freq] * 64);
669 ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
670 }
671
672 return MV_OK;
673}
674
675/*
676 * Load Topology map
677 */
678int hws_ddr3_tip_load_topology_map(u32 dev_num, struct hws_topology_map *tm)
679{
680 enum hws_speed_bin speed_bin_index;
681 enum hws_ddr_freq freq = DDR_FREQ_LIMIT;
682 u32 if_id;
683
684 freq_val[DDR_FREQ_LOW_FREQ] = dfs_low_freq;
685 tm = ddr3_get_topology_map();
686 CHECK_STATUS(ddr3_tip_get_first_active_if
687 ((u8)dev_num, tm->if_act_mask,
688 &first_active_if));
689 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
690 ("board IF_Mask=0x%x num_of_bus_per_interface=0x%x\n",
691 tm->if_act_mask,
692 tm->num_of_bus_per_interface));
693
694 /*
695 * if CL, CWL values are missing in topology map, then fill them
696 * according to speedbin tables
697 */
698 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
699 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
700 speed_bin_index =
701 tm->interface_params[if_id].speed_bin_index;
702 /* TBD memory frequency of interface 0 only is used ! */
703 freq = tm->interface_params[first_active_if].memory_freq;
704
705 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
706 ("speed_bin_index =%d freq=%d cl=%d cwl=%d\n",
707 speed_bin_index, freq_val[freq],
708 tm->interface_params[if_id].
709 cas_l,
710 tm->interface_params[if_id].
711 cas_wl));
712
713 if (tm->interface_params[if_id].cas_l == 0) {
714 tm->interface_params[if_id].cas_l =
715 cas_latency_table[speed_bin_index].cl_val[freq];
716 }
717
718 if (tm->interface_params[if_id].cas_wl == 0) {
719 tm->interface_params[if_id].cas_wl =
720 cas_write_latency_table[speed_bin_index].cl_val[freq];
721 }
722 }
723
724 return MV_OK;
725}
726
727/*
728 * RANK Control Flow
729 */
730static int ddr3_tip_rank_control(u32 dev_num, u32 if_id)
731{
732 u32 data_value = 0, bus_cnt;
733 struct hws_topology_map *tm = ddr3_get_topology_map();
734
735 for (bus_cnt = 1; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
736 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
737 if ((tm->interface_params[if_id].
738 as_bus_params[0].cs_bitmask !=
739 tm->interface_params[if_id].
740 as_bus_params[bus_cnt].cs_bitmask) ||
741 (tm->interface_params[if_id].
742 as_bus_params[0].mirror_enable_bitmask !=
743 tm->interface_params[if_id].
744 as_bus_params[bus_cnt].mirror_enable_bitmask))
745 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
 746 ("WARNING: Wrong configuration for pup #%d: CS mask and CS mirroring for all pups should be the same\n",
747 bus_cnt));
748 }
749
750 data_value |= tm->interface_params[if_id].
751 as_bus_params[0].cs_bitmask;
752 data_value |= tm->interface_params[if_id].
753 as_bus_params[0].mirror_enable_bitmask << 4;
754
755 CHECK_STATUS(ddr3_tip_if_write
756 (dev_num, ACCESS_TYPE_UNICAST, if_id, RANK_CTRL_REG,
757 data_value, 0xff));
758
759 return MV_OK;
760}
761
762/*
763 * PAD Inverse Flow
764 */
765static int ddr3_tip_pad_inv(u32 dev_num, u32 if_id)
766{
767 u32 bus_cnt, data_value, ck_swap_pup_ctrl;
768 struct hws_topology_map *tm = ddr3_get_topology_map();
769
770 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
771 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
772 if (tm->interface_params[if_id].
773 as_bus_params[bus_cnt].is_dqs_swap == 1) {
774 /* dqs swap */
775 ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST,
776 if_id, bus_cnt,
777 DDR_PHY_DATA,
778 PHY_CONTROL_PHY_REG, 0xc0,
779 0xc0);
780 }
781
782 if (tm->interface_params[if_id].
783 as_bus_params[bus_cnt].is_ck_swap == 1) {
784 if (bus_cnt <= 1)
785 data_value = 0x5 << 2;
786 else
787 data_value = 0xa << 2;
788
789 /* mask equals data */
790 /* ck swap pup is only control pup #0 ! */
791 ck_swap_pup_ctrl = 0;
792 ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST,
793 if_id, ck_swap_pup_ctrl,
794 DDR_PHY_CONTROL,
795 PHY_CONTROL_PHY_REG,
796 data_value, data_value);
797 }
798 }
799
800 return MV_OK;
801}
802
803/*
804 * Run Training Flow
805 */
806int hws_ddr3_tip_run_alg(u32 dev_num, enum hws_algo_type algo_type)
807{
808 int ret = MV_OK, ret_tune = MV_OK;
809
810#ifdef ODT_TEST_SUPPORT
811 if (finger_test == 1)
812 return odt_test(dev_num, algo_type);
813#endif
814
815 if (algo_type == ALGO_TYPE_DYNAMIC) {
816 ret = ddr3_tip_ddr3_auto_tune(dev_num);
817 } else {
818#ifdef STATIC_ALGO_SUPPORT
819 {
820 enum hws_ddr_freq freq;
821 freq = init_freq;
822
823 /* add to mask */
824 if (is_adll_calib_before_init != 0) {
825 printf("with adll calib before init\n");
826 adll_calibration(dev_num, ACCESS_TYPE_MULTICAST,
827 0, freq);
828 }
829 /*
830 * Frequency per interface is not relevant,
831 * only interface 0
832 */
833 ret = ddr3_tip_run_static_alg(dev_num,
834 freq);
835 }
836#endif
837 }
838
839 if (ret != MV_OK) {
840 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
841 ("Run_alg: tuning failed %d\n", ret_tune));
842 }
843
844 return ret;
845}
846
847#ifdef ODT_TEST_SUPPORT
848/*
849 * ODT Test
850 */
851static int odt_test(u32 dev_num, enum hws_algo_type algo_type)
852{
853 int ret = MV_OK, ret_tune = MV_OK;
854 int pfinger_val = 0, nfinger_val;
855
856 for (pfinger_val = p_finger_start; pfinger_val <= p_finger_end;
857 pfinger_val += p_finger_step) {
858 for (nfinger_val = n_finger_start; nfinger_val <= n_finger_end;
859 nfinger_val += n_finger_step) {
860 if (finger_test != 0) {
861 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
862 ("pfinger_val %d nfinger_val %d\n",
863 pfinger_val, nfinger_val));
864 p_finger = pfinger_val;
865 n_finger = nfinger_val;
866 }
867
868 if (algo_type == ALGO_TYPE_DYNAMIC) {
869 ret = ddr3_tip_ddr3_auto_tune(dev_num);
870 } else {
871 /*
872 * Frequency per interface is not relevant,
873 * only interface 0
874 */
875 ret = ddr3_tip_run_static_alg(dev_num,
876 init_freq);
877 }
878 }
879 }
880
881 if (ret_tune != MV_OK) {
882 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
883 ("Run_alg: tuning failed %d\n", ret_tune));
884 ret = (ret == MV_OK) ? ret_tune : ret;
885 }
886
887 return ret;
888}
889#endif
890
891/*
892 * Select Controller
893 */
894int hws_ddr3_tip_select_ddr_controller(u32 dev_num, int enable)
895{
896 if (config_func_info[dev_num].tip_dunit_mux_select_func != NULL) {
897 return config_func_info[dev_num].
898 tip_dunit_mux_select_func((u8)dev_num, enable);
899 }
900
901 return MV_FAIL;
902}
903
904/*
905 * Dunit Register Write
906 */
907int ddr3_tip_if_write(u32 dev_num, enum hws_access_type interface_access,
908 u32 if_id, u32 reg_addr, u32 data_value, u32 mask)
909{
910 if (config_func_info[dev_num].tip_dunit_write_func != NULL) {
911 return config_func_info[dev_num].
912 tip_dunit_write_func((u8)dev_num, interface_access,
913 if_id, reg_addr,
914 data_value, mask);
915 }
916
917 return MV_FAIL;
918}
919
920/*
921 * Dunit Register Read
922 */
923int ddr3_tip_if_read(u32 dev_num, enum hws_access_type interface_access,
924 u32 if_id, u32 reg_addr, u32 *data, u32 mask)
925{
926 if (config_func_info[dev_num].tip_dunit_read_func != NULL) {
927 return config_func_info[dev_num].
928 tip_dunit_read_func((u8)dev_num, interface_access,
929 if_id, reg_addr,
930 data, mask);
931 }
932
933 return MV_FAIL;
934}
935
936/*
937 * Dunit Register Polling
938 */
939int ddr3_tip_if_polling(u32 dev_num, enum hws_access_type access_type,
940 u32 if_id, u32 exp_value, u32 mask, u32 offset,
941 u32 poll_tries)
942{
943 u32 poll_cnt = 0, interface_num = 0, start_if, end_if;
944 u32 read_data[MAX_INTERFACE_NUM];
945 int ret;
946 int is_fail = 0, is_if_fail;
947 struct hws_topology_map *tm = ddr3_get_topology_map();
948
949 if (access_type == ACCESS_TYPE_MULTICAST) {
950 start_if = 0;
951 end_if = MAX_INTERFACE_NUM - 1;
952 } else {
953 start_if = if_id;
954 end_if = if_id;
955 }
956
957 for (interface_num = start_if; interface_num <= end_if; interface_num++) {
958 /* polling bit 3 for n times */
959 VALIDATE_ACTIVE(tm->if_act_mask, interface_num);
960
961 is_if_fail = 0;
962 for (poll_cnt = 0; poll_cnt < poll_tries; poll_cnt++) {
963 ret =
964 ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST,
965 interface_num, offset, read_data,
966 mask);
967 if (ret != MV_OK)
968 return ret;
969
970 if (read_data[interface_num] == exp_value)
971 break;
972 }
973
974 if (poll_cnt >= poll_tries) {
975 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
976 ("max poll IF #%d\n", interface_num));
977 is_fail = 1;
978 is_if_fail = 1;
979 }
980
981 training_result[training_stage][interface_num] =
982 (is_if_fail == 1) ? TEST_FAILED : TEST_SUCCESS;
983 }
984
985 return (is_fail == 0) ? MV_OK : MV_FAIL;
986}
987
988/*
989 * Bus read access
990 */
991int ddr3_tip_bus_read(u32 dev_num, u32 if_id,
992 enum hws_access_type phy_access, u32 phy_id,
993 enum hws_ddr_phy phy_type, u32 reg_addr, u32 *data)
994{
995 u32 bus_index = 0;
996 u32 data_read[MAX_INTERFACE_NUM];
997 struct hws_topology_map *tm = ddr3_get_topology_map();
998
999 if (phy_access == ACCESS_TYPE_MULTICAST) {
1000 for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
1001 bus_index++) {
1002 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
1003 CHECK_STATUS(ddr3_tip_bus_access
1004 (dev_num, ACCESS_TYPE_UNICAST,
1005 if_id, ACCESS_TYPE_UNICAST,
1006 bus_index, phy_type, reg_addr, 0,
1007 OPERATION_READ));
1008 CHECK_STATUS(ddr3_tip_if_read
1009 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1010 PHY_REG_FILE_ACCESS, data_read,
1011 MASK_ALL_BITS));
1012 data[bus_index] = (data_read[if_id] & 0xffff);
1013 }
1014 } else {
1015 CHECK_STATUS(ddr3_tip_bus_access
1016 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1017 phy_access, phy_id, phy_type, reg_addr, 0,
1018 OPERATION_READ));
1019 CHECK_STATUS(ddr3_tip_if_read
1020 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1021 PHY_REG_FILE_ACCESS, data_read, MASK_ALL_BITS));
1022
1023 /*
 1024 * only the 16 LSB bits are valid in the PHY (each register is different,
1025 * some can actually be less than 16 bits)
1026 */
1027 *data = (data_read[if_id] & 0xffff);
1028 }
1029
1030 return MV_OK;
1031}
1032
1033/*
1034 * Bus write access
1035 */
1036int ddr3_tip_bus_write(u32 dev_num, enum hws_access_type interface_access,
1037 u32 if_id, enum hws_access_type phy_access,
1038 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
1039 u32 data_value)
1040{
1041 CHECK_STATUS(ddr3_tip_bus_access
1042 (dev_num, interface_access, if_id, phy_access,
1043 phy_id, phy_type, reg_addr, data_value, OPERATION_WRITE));
1044
1045 return MV_OK;
1046}
1047
1048/*
1049 * Bus access routine (relevant for both read & write)
1050 */
1051static int ddr3_tip_bus_access(u32 dev_num, enum hws_access_type interface_access,
1052 u32 if_id, enum hws_access_type phy_access,
1053 u32 phy_id, enum hws_ddr_phy phy_type, u32 reg_addr,
1054 u32 data_value, enum hws_operation oper_type)
1055{
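	/*
	 * Layout of the PHY register-file access word assembled below:
	 * bit 30 - operation (read/write), bits 29:28 - reg addr[7:6],
	 * bit 27 - phy access type, bit 26 - phy type, bits 25:22 - phy id,
	 * bits 21:16 - reg addr[5:0], bits 15:0 - write data. data_p2
	 * additionally sets bit 31, the start/busy flag that
	 * is_bus_access_done() later polls until the unit clears it.
	 */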
1056 u32 addr_low = 0x3f & reg_addr;
1057 u32 addr_hi = ((0xc0 & reg_addr) >> 6);
1058 u32 data_p1 =
1059 (oper_type << 30) + (addr_hi << 28) + (phy_access << 27) +
1060 (phy_type << 26) + (phy_id << 22) + (addr_low << 16) +
1061 (data_value & 0xffff);
1062 u32 data_p2 = data_p1 + (1 << 31);
1063 u32 start_if, end_if;
1064 struct hws_topology_map *tm = ddr3_get_topology_map();
1065
1066 CHECK_STATUS(ddr3_tip_if_write
1067 (dev_num, interface_access, if_id, PHY_REG_FILE_ACCESS,
1068 data_p1, MASK_ALL_BITS));
1069 CHECK_STATUS(ddr3_tip_if_write
1070 (dev_num, interface_access, if_id, PHY_REG_FILE_ACCESS,
1071 data_p2, MASK_ALL_BITS));
1072
1073 if (interface_access == ACCESS_TYPE_UNICAST) {
1074 start_if = if_id;
1075 end_if = if_id;
1076 } else {
1077 start_if = 0;
1078 end_if = MAX_INTERFACE_NUM - 1;
1079 }
1080
1081 /* polling for read/write execution done */
1082 for (if_id = start_if; if_id <= end_if; if_id++) {
1083 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1084 CHECK_STATUS(is_bus_access_done
1085 (dev_num, if_id, PHY_REG_FILE_ACCESS, 31));
1086 }
1087
1088 return MV_OK;
1089}
1090
1091/*
1092 * Check bus access done
1093 */
1094static int is_bus_access_done(u32 dev_num, u32 if_id, u32 dunit_reg_adrr,
1095 u32 bit)
1096{
1097 u32 rd_data = 1;
1098 u32 cnt = 0;
1099 u32 data_read[MAX_INTERFACE_NUM];
1100
1101 CHECK_STATUS(ddr3_tip_if_read
1102 (dev_num, ACCESS_TYPE_UNICAST, if_id, dunit_reg_adrr,
1103 data_read, MASK_ALL_BITS));
1104 rd_data = data_read[if_id];
1105 rd_data &= (1 << bit);
1106
1107 while (rd_data != 0) {
1108 if (cnt++ >= MAX_POLLING_ITERATIONS)
1109 break;
1110
1111 CHECK_STATUS(ddr3_tip_if_read
1112 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1113 dunit_reg_adrr, data_read, MASK_ALL_BITS));
1114 rd_data = data_read[if_id];
1115 rd_data &= (1 << bit);
1116 }
1117
1118 if (cnt < MAX_POLLING_ITERATIONS)
1119 return MV_OK;
1120 else
1121 return MV_FAIL;
1122}
1123
1124/*
1125 * Phy read-modify-write
1126 */
1127int ddr3_tip_bus_read_modify_write(u32 dev_num, enum hws_access_type access_type,
1128 u32 interface_id, u32 phy_id,
1129 enum hws_ddr_phy phy_type, u32 reg_addr,
1130 u32 data_value, u32 reg_mask)
1131{
1132 u32 data_val = 0, if_id, start_if, end_if;
1133 struct hws_topology_map *tm = ddr3_get_topology_map();
1134
1135 if (access_type == ACCESS_TYPE_MULTICAST) {
1136 start_if = 0;
1137 end_if = MAX_INTERFACE_NUM - 1;
1138 } else {
1139 start_if = interface_id;
1140 end_if = interface_id;
1141 }
1142
1143 for (if_id = start_if; if_id <= end_if; if_id++) {
1144 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1145 CHECK_STATUS(ddr3_tip_bus_read
1146 (dev_num, if_id, ACCESS_TYPE_UNICAST, phy_id,
1147 phy_type, reg_addr, &data_val));
1148 data_value = (data_val & (~reg_mask)) | (data_value & reg_mask);
1149 CHECK_STATUS(ddr3_tip_bus_write
1150 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1151 ACCESS_TYPE_UNICAST, phy_id, phy_type, reg_addr,
1152 data_value));
1153 }
1154
1155 return MV_OK;
1156}
1157
1158/*
1159 * ADLL Calibration
1160 */
1161int adll_calibration(u32 dev_num, enum hws_access_type access_type,
1162 u32 if_id, enum hws_ddr_freq frequency)
1163{
1164 struct hws_tip_freq_config_info freq_config_info;
1165 u32 bus_cnt = 0;
1166 struct hws_topology_map *tm = ddr3_get_topology_map();
1167
1168 /* Reset Diver_b assert -> de-assert */
1169 CHECK_STATUS(ddr3_tip_if_write
1170 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
1171 0, 0x10000000));
1172 mdelay(10);
1173 CHECK_STATUS(ddr3_tip_if_write
1174 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
1175 0x10000000, 0x10000000));
1176
1177 if (config_func_info[dev_num].tip_get_freq_config_info_func != NULL) {
1178 CHECK_STATUS(config_func_info[dev_num].
1179 tip_get_freq_config_info_func((u8)dev_num, frequency,
1180 &freq_config_info));
1181 } else {
1182 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1183 ("tip_get_freq_config_info_func is NULL"));
1184 return MV_NOT_INITIALIZED;
1185 }
1186
1187 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES(); bus_cnt++) {
1188 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
1189 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1190 (dev_num, access_type, if_id, bus_cnt,
1191 DDR_PHY_DATA, BW_PHY_REG,
1192 freq_config_info.bw_per_freq << 8, 0x700));
1193 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1194 (dev_num, access_type, if_id, bus_cnt,
1195 DDR_PHY_DATA, RATE_PHY_REG,
1196 freq_config_info.rate_per_freq, 0x7));
1197 }
1198
1199 /* DUnit to Phy drive post edge, ADLL reset assert de-assert */
1200 CHECK_STATUS(ddr3_tip_if_write
1201 (dev_num, access_type, if_id, DRAM_PHY_CONFIGURATION,
1202 0, (0x80000000 | 0x40000000)));
1203 mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ]));
1204 CHECK_STATUS(ddr3_tip_if_write
1205 (dev_num, access_type, if_id, DRAM_PHY_CONFIGURATION,
1206 (0x80000000 | 0x40000000), (0x80000000 | 0x40000000)));
1207
1208 /* polling for ADLL Done */
1209 if (ddr3_tip_if_polling(dev_num, access_type, if_id,
1210 0x3ff03ff, 0x3ff03ff, PHY_LOCK_STATUS_REG,
1211 MAX_POLLING_ITERATIONS) != MV_OK) {
1212 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1213 ("Freq_set: DDR3 poll failed(1)"));
1214 }
1215
1216 /* pup data_pup reset assert-> deassert */
1217 CHECK_STATUS(ddr3_tip_if_write
1218 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
1219 0, 0x60000000));
1220 mdelay(10);
1221 CHECK_STATUS(ddr3_tip_if_write
1222 (dev_num, access_type, if_id, SDRAM_CONFIGURATION_REG,
1223 0x60000000, 0x60000000));
1224
1225 return MV_OK;
1226}
1227
1228int ddr3_tip_freq_set(u32 dev_num, enum hws_access_type access_type,
1229 u32 if_id, enum hws_ddr_freq frequency)
1230{
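	/*
	 * Frequency change (DFS) flow as implemented below: block new
	 * transactions, enter self-refresh, reprogram the PLL divider and the
	 * CL/CWL/tWR/refresh values for the new clock, reset and re-lock the
	 * ADLLs, exit self-refresh, issue a refresh command, then restore
	 * MR0/MR2 and the ODT timing registers.
	 */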
1231 u32 cl_value = 0, cwl_value = 0, mem_mask = 0, val = 0,
1232 bus_cnt = 0, t_hclk = 0, t_wr = 0,
1233 refresh_interval_cnt = 0, cnt_id;
 1234 u32 t_ckclk;
 1235 u32 t_refi = 0, end_if, start_if;
1236 u32 bus_index = 0;
1237 int is_dll_off = 0;
1238 enum hws_speed_bin speed_bin_index = 0;
1239 struct hws_tip_freq_config_info freq_config_info;
1240 enum hws_result *flow_result = training_result[training_stage];
1241 u32 adll_tap = 0;
1242 u32 cs_mask[MAX_INTERFACE_NUM];
1243 struct hws_topology_map *tm = ddr3_get_topology_map();
1244
1245 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1246 ("dev %d access %d IF %d freq %d\n", dev_num,
1247 access_type, if_id, frequency));
1248
1249 if (frequency == DDR_FREQ_LOW_FREQ)
1250 is_dll_off = 1;
1251 if (access_type == ACCESS_TYPE_MULTICAST) {
1252 start_if = 0;
1253 end_if = MAX_INTERFACE_NUM - 1;
1254 } else {
1255 start_if = if_id;
1256 end_if = if_id;
1257 }
1258
1259 /* calculate interface cs mask - Oferb 4/11 */
1260 /* speed bin can be different for each interface */
1261 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1262 /* cs enable is active low */
1263 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1264 cs_mask[if_id] = CS_BIT_MASK;
1265 training_result[training_stage][if_id] = TEST_SUCCESS;
1266 ddr3_tip_calc_cs_mask(dev_num, if_id, effective_cs,
1267 &cs_mask[if_id]);
1268 }
1269
1270 /* speed bin can be different for each interface */
1271 /*
 1272 * moti b - need to remove the loop for multicast access functions
1273 * and loop the unicast access functions
1274 */
1275 for (if_id = start_if; if_id <= end_if; if_id++) {
1276 if (IS_ACTIVE(tm->if_act_mask, if_id) == 0)
1277 continue;
1278
1279 flow_result[if_id] = TEST_SUCCESS;
1280 speed_bin_index =
1281 tm->interface_params[if_id].speed_bin_index;
1282 if (tm->interface_params[if_id].memory_freq ==
1283 frequency) {
1284 cl_value =
1285 tm->interface_params[if_id].cas_l;
1286 cwl_value =
1287 tm->interface_params[if_id].cas_wl;
1288 } else {
1289 cl_value =
1290 cas_latency_table[speed_bin_index].cl_val[frequency];
1291 cwl_value =
1292 cas_write_latency_table[speed_bin_index].
1293 cl_val[frequency];
1294 }
1295
1296 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1297 ("Freq_set dev 0x%x access 0x%x if 0x%x freq 0x%x speed %d:\n\t",
1298 dev_num, access_type, if_id,
1299 frequency, speed_bin_index));
1300
1301 for (cnt_id = 0; cnt_id < DDR_FREQ_LIMIT; cnt_id++) {
1302 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE,
1303 ("%d ",
1304 cas_latency_table[speed_bin_index].
1305 cl_val[cnt_id]));
1306 }
1307
1308 DEBUG_TRAINING_IP(DEBUG_LEVEL_TRACE, ("\n"));
1309 mem_mask = 0;
1310 for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
1311 bus_index++) {
1312 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
1313 mem_mask |=
1314 tm->interface_params[if_id].
1315 as_bus_params[bus_index].mirror_enable_bitmask;
1316 }
1317
1318 if (mem_mask != 0) {
 1319 /* motib redundant in KW28 */
 1320 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1321 if_id,
1322 CS_ENABLE_REG, 0, 0x8));
1323 }
1324
1325 /* dll state after exiting SR */
1326 if (is_dll_off == 1) {
1327 CHECK_STATUS(ddr3_tip_if_write
1328 (dev_num, access_type, if_id,
1329 DFS_REG, 0x1, 0x1));
1330 } else {
1331 CHECK_STATUS(ddr3_tip_if_write
1332 (dev_num, access_type, if_id,
1333 DFS_REG, 0, 0x1));
1334 }
1335
1336 CHECK_STATUS(ddr3_tip_if_write
1337 (dev_num, access_type, if_id,
1338 DUNIT_MMASK_REG, 0, 0x1));
1339 /* DFS - block transactions */
1340 CHECK_STATUS(ddr3_tip_if_write
1341 (dev_num, access_type, if_id,
1342 DFS_REG, 0x2, 0x2));
1343
1344 /* disable ODT in case of dll off */
1345 if (is_dll_off == 1) {
1346 CHECK_STATUS(ddr3_tip_if_write
1347 (dev_num, access_type, if_id,
1348 0x1874, 0, 0x244));
1349 CHECK_STATUS(ddr3_tip_if_write
1350 (dev_num, access_type, if_id,
1351 0x1884, 0, 0x244));
1352 CHECK_STATUS(ddr3_tip_if_write
1353 (dev_num, access_type, if_id,
1354 0x1894, 0, 0x244));
1355 CHECK_STATUS(ddr3_tip_if_write
1356 (dev_num, access_type, if_id,
1357 0x18a4, 0, 0x244));
1358 }
1359
1360 /* DFS - Enter Self-Refresh */
1361 CHECK_STATUS(ddr3_tip_if_write
1362 (dev_num, access_type, if_id, DFS_REG, 0x4,
1363 0x4));
1364 /* polling on self refresh entry */
1365 if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST,
1366 if_id, 0x8, 0x8, DFS_REG,
1367 MAX_POLLING_ITERATIONS) != MV_OK) {
1368 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1369 ("Freq_set: DDR3 poll failed on SR entry\n"));
1370 }
1371
1372 /* PLL configuration */
1373 if (config_func_info[dev_num].tip_set_freq_divider_func != NULL) {
1374 config_func_info[dev_num].
1375 tip_set_freq_divider_func(dev_num, if_id,
1376 frequency);
1377 }
1378
1379 /* PLL configuration End */
1380
1381 /* adjust t_refi to new frequency */
1382 t_refi = (tm->interface_params[if_id].interface_temp ==
 1383 HWS_TEMP_HIGH) ? TREFI_HIGH : TREFI_LOW;
 1384 t_refi *= 1000; /* psec */
1385
1386 /* HCLK in[ps] */
1387 t_hclk = MEGA / (freq_val[frequency] / 2);
1388 refresh_interval_cnt = t_refi / t_hclk; /* no units */
1389 val = 0x4000 | refresh_interval_cnt;
1390 CHECK_STATUS(ddr3_tip_if_write
1391 (dev_num, access_type, if_id,
1392 SDRAM_CONFIGURATION_REG, val, 0x7fff));
1393
1394 /* DFS - CL/CWL/WR parameters after exiting SR */
1395 CHECK_STATUS(ddr3_tip_if_write
1396 (dev_num, access_type, if_id, DFS_REG,
1397 (cl_mask_table[cl_value] << 8), 0xf00));
1398 CHECK_STATUS(ddr3_tip_if_write
1399 (dev_num, access_type, if_id, DFS_REG,
1400 (cwl_mask_table[cwl_value] << 12), 0x7000));
 1401
1402 t_ckclk = MEGA / freq_val[frequency];
1403 t_wr = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
1404 SPEED_BIN_TWR),
1405 t_ckclk);
1406
 1407 CHECK_STATUS(ddr3_tip_if_write
1408 (dev_num, access_type, if_id, DFS_REG,
1409 (twr_mask_table[t_wr + 1] << 16), 0x70000));
1410
1411 /* Restore original RTT values if returning from DLL OFF mode */
1412 if (is_dll_off == 1) {
1413 CHECK_STATUS(ddr3_tip_if_write
1414 (dev_num, access_type, if_id, 0x1874,
1415 g_dic | g_rtt_nom, 0x266));
1416 CHECK_STATUS(ddr3_tip_if_write
1417 (dev_num, access_type, if_id, 0x1884,
1418 g_dic | g_rtt_nom, 0x266));
1419 CHECK_STATUS(ddr3_tip_if_write
1420 (dev_num, access_type, if_id, 0x1894,
1421 g_dic | g_rtt_nom, 0x266));
1422 CHECK_STATUS(ddr3_tip_if_write
1423 (dev_num, access_type, if_id, 0x18a4,
1424 g_dic | g_rtt_nom, 0x266));
1425 }
1426
1427 /* Reset Diver_b assert -> de-assert */
1428 CHECK_STATUS(ddr3_tip_if_write
1429 (dev_num, access_type, if_id,
1430 SDRAM_CONFIGURATION_REG, 0, 0x10000000));
1431 mdelay(10);
1432 CHECK_STATUS(ddr3_tip_if_write
1433 (dev_num, access_type, if_id,
1434 SDRAM_CONFIGURATION_REG, 0x10000000, 0x10000000));
1435
1436 /* Adll configuration function of process and Frequency */
1437 if (config_func_info[dev_num].tip_get_freq_config_info_func != NULL) {
1438 CHECK_STATUS(config_func_info[dev_num].
1439 tip_get_freq_config_info_func(dev_num, frequency,
1440 &freq_config_info));
1441 }
1442 /* TBD check milo5 using device ID ? */
1443 for (bus_cnt = 0; bus_cnt < GET_TOPOLOGY_NUM_OF_BUSES();
1444 bus_cnt++) {
1445 VALIDATE_ACTIVE(tm->bus_act_mask, bus_cnt);
1446 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1447 (dev_num, ACCESS_TYPE_UNICAST,
1448 if_id, bus_cnt, DDR_PHY_DATA,
1449 0x92,
1450 freq_config_info.
1451 bw_per_freq << 8
1452 /*freq_mask[dev_num][frequency] << 8 */
1453 , 0x700));
1454 CHECK_STATUS(ddr3_tip_bus_read_modify_write
1455 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1456 bus_cnt, DDR_PHY_DATA, 0x94,
1457 freq_config_info.rate_per_freq, 0x7));
1458 }
1459
1460 /* DUnit to Phy drive post edge, ADLL reset assert de-assert */
1461 CHECK_STATUS(ddr3_tip_if_write
1462 (dev_num, access_type, if_id,
1463 DRAM_PHY_CONFIGURATION, 0,
1464 (0x80000000 | 0x40000000)));
1465 mdelay(100 / (freq_val[frequency] / freq_val[DDR_FREQ_LOW_FREQ]));
1466 CHECK_STATUS(ddr3_tip_if_write
1467 (dev_num, access_type, if_id,
1468 DRAM_PHY_CONFIGURATION, (0x80000000 | 0x40000000),
1469 (0x80000000 | 0x40000000)));
1470
1471 /* polling for ADLL Done */
1472 if (ddr3_tip_if_polling
1473 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0x3ff03ff,
1474 0x3ff03ff, PHY_LOCK_STATUS_REG,
1475 MAX_POLLING_ITERATIONS) != MV_OK) {
1476 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1477 ("Freq_set: DDR3 poll failed(1)\n"));
1478 }
1479
1480 /* pup data_pup reset assert-> deassert */
1481 CHECK_STATUS(ddr3_tip_if_write
1482 (dev_num, access_type, if_id,
1483 SDRAM_CONFIGURATION_REG, 0, 0x60000000));
1484 mdelay(10);
1485 CHECK_STATUS(ddr3_tip_if_write
1486 (dev_num, access_type, if_id,
1487 SDRAM_CONFIGURATION_REG, 0x60000000, 0x60000000));
1488
 1489 /* Set proper timing params before exiting Self-Refresh */
1490 ddr3_tip_set_timing(dev_num, access_type, if_id, frequency);
1491 if (delay_enable != 0) {
1492 adll_tap = MEGA / (freq_val[frequency] * 64);
1493 ddr3_tip_cmd_addr_init_delay(dev_num, adll_tap);
1494 }
1495
1496 /* Exit SR */
1497 CHECK_STATUS(ddr3_tip_if_write
1498 (dev_num, access_type, if_id, DFS_REG, 0,
1499 0x4));
1500 if (ddr3_tip_if_polling
1501 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x8, DFS_REG,
1502 MAX_POLLING_ITERATIONS) != MV_OK) {
1503 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1504 ("Freq_set: DDR3 poll failed(2)"));
1505 }
1506
1507 /* Refresh Command */
1508 CHECK_STATUS(ddr3_tip_if_write
1509 (dev_num, access_type, if_id,
1510 SDRAM_OPERATION_REG, 0x2, 0xf1f));
1511 if (ddr3_tip_if_polling
1512 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1f,
1513 SDRAM_OPERATION_REG, MAX_POLLING_ITERATIONS) != MV_OK) {
1514 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1515 ("Freq_set: DDR3 poll failed(3)"));
1516 }
1517
1518 /* Release DFS Block */
1519 CHECK_STATUS(ddr3_tip_if_write
1520 (dev_num, access_type, if_id, DFS_REG, 0,
1521 0x2));
1522 /* Controller to MBUS Retry - normal */
1523 CHECK_STATUS(ddr3_tip_if_write
1524 (dev_num, access_type, if_id, DUNIT_MMASK_REG,
1525 0x1, 0x1));
1526
 1527 /* MR0: Burst Length 8, CL, Auto_precharge 0x16cc */
1528 val =
1529 ((cl_mask_table[cl_value] & 0x1) << 2) |
1530 ((cl_mask_table[cl_value] & 0xe) << 3);
1531 CHECK_STATUS(ddr3_tip_if_write
1532 (dev_num, access_type, if_id, MR0_REG,
1533 val, (0x7 << 4) | (1 << 2)));
1534 /* MR2: CWL = 10 , Auto Self-Refresh - disable */
1535 val = (cwl_mask_table[cwl_value] << 3);
1536 /*
1537 * nklein 24.10.13 - should not be here - leave value as set in
1538 * the init configuration val |= (1 << 9);
1539 * val |= ((tm->interface_params[if_id].
1540 * interface_temp == HWS_TEMP_HIGH) ? (1 << 7) : 0);
1541 */
1542 /* nklein 24.10.13 - see above comment */
1543 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1544 if_id, MR2_REG,
1545 val, (0x7 << 3)));
1546
1547 /* ODT TIMING */
1548 val = ((cl_value - cwl_value + 1) << 4) |
1549 ((cl_value - cwl_value + 6) << 8) |
1550 ((cl_value - 1) << 12) | ((cl_value + 6) << 16);
1551 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1552 if_id, ODT_TIMING_LOW,
1553 val, 0xffff0));
 1554 val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
 1555 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1556 if_id, ODT_TIMING_HI_REG,
1557 val, 0xffff));
1558
1559 /* ODT Active */
1560 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1561 if_id,
1562 DUNIT_ODT_CONTROL_REG,
1563 0xf, 0xf));
1564
1565 /* re-write CL */
1566 val = ((cl_mask_table[cl_value] & 0x1) << 2) |
1567 ((cl_mask_table[cl_value] & 0xe) << 3);
1568 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1569 0, MR0_REG, val,
1570 (0x7 << 4) | (1 << 2)));
1571
1572 /* re-write CWL */
1573 val = (cwl_mask_table[cwl_value] << 3);
1574 CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MRS2_CMD,
1575 val, (0x7 << 3)));
1576 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1577 0, MR2_REG, val, (0x7 << 3)));
1578
1579 if (mem_mask != 0) {
1580 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1581 if_id,
1582 CS_ENABLE_REG,
1583 1 << 3, 0x8));
1584 }
1585 }
1586
1587 return MV_OK;
1588}
1589
1590/*
1591 * Set ODT values
1592 */
1593static int ddr3_tip_write_odt(u32 dev_num, enum hws_access_type access_type,
1594 u32 if_id, u32 cl_value, u32 cwl_value)
1595{
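	/*
	 * The ODT timing windows programmed below are simple functions of CL
	 * and CWL; bits 23:21 of ODT_TIMING_LOW carry bit 4 of the three wider
	 * fields packed at bit positions 8, 12 and 16.
	 */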
1596 /* ODT TIMING */
1597 u32 val = (cl_value - cwl_value + 6);
1598
1599 val = ((cl_value - cwl_value + 1) << 4) | ((val & 0xf) << 8) |
1600 (((cl_value - 1) & 0xf) << 12) |
1601 (((cl_value + 6) & 0xf) << 16) | (((val & 0x10) >> 4) << 21);
1602 val |= (((cl_value - 1) >> 4) << 22) | (((cl_value + 6) >> 4) << 23);
1603
1604 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1605 ODT_TIMING_LOW, val, 0xffff0));
 1606 val = 0x91 | ((cwl_value - 1) << 8) | ((cwl_value + 5) << 12);
 1607 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1608 ODT_TIMING_HI_REG, val, 0xffff));
1609 if (odt_additional == 1) {
1610 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type,
1611 if_id,
1612 SDRAM_ODT_CONTROL_HIGH_REG,
1613 0xf, 0xf));
1614 }
1615
1616 /* ODT Active */
1617 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1618 DUNIT_ODT_CONTROL_REG, 0xf, 0xf));
1619
1620 return MV_OK;
1621}
1622
1623/*
1624 * Set Timing values for training
1625 */
1626static int ddr3_tip_set_timing(u32 dev_num, enum hws_access_type access_type,
1627 u32 if_id, enum hws_ddr_freq frequency)
1628{
1629 u32 t_ckclk = 0, t_ras = 0;
1630 u32 t_rcd = 0, t_rp = 0, t_wr = 0, t_wtr = 0, t_rrd = 0, t_rtp = 0,
1631 t_rfc = 0, t_mod = 0;
1632 u32 val = 0, page_size = 0;
1633 enum hws_speed_bin speed_bin_index;
1634 enum hws_mem_size memory_size = MEM_2G;
1635 struct hws_topology_map *tm = ddr3_get_topology_map();
1636
1637 speed_bin_index = tm->interface_params[if_id].speed_bin_index;
1638 memory_size = tm->interface_params[if_id].memory_size;
1639 page_size =
1640 (tm->interface_params[if_id].bus_width ==
1641 BUS_WIDTH_8) ? page_param[memory_size].
1642 page_size_8bit : page_param[memory_size].page_size_16bit;
1643 t_ckclk = (MEGA / freq_val[frequency]);
1644 t_rrd = (page_size == 1) ? speed_bin_table(speed_bin_index,
1645 SPEED_BIN_TRRD1K) :
1646 speed_bin_table(speed_bin_index, SPEED_BIN_TRRD2K);
1647 t_rrd = GET_MAX_VALUE(t_ckclk * 4, t_rrd);
1648 t_rtp = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index,
1649 SPEED_BIN_TRTP));
1650 t_wtr = GET_MAX_VALUE(t_ckclk * 4, speed_bin_table(speed_bin_index,
1651 SPEED_BIN_TWTR));
1652 t_ras = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
1653 SPEED_BIN_TRAS),
1654 t_ckclk);
1655 t_rcd = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
1656 SPEED_BIN_TRCD),
1657 t_ckclk);
1658 t_rp = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
1659 SPEED_BIN_TRP),
1660 t_ckclk);
1661 t_wr = TIME_2_CLOCK_CYCLES(speed_bin_table(speed_bin_index,
1662 SPEED_BIN_TWR),
1663 t_ckclk);
1664 t_wtr = TIME_2_CLOCK_CYCLES(t_wtr, t_ckclk);
1665 t_rrd = TIME_2_CLOCK_CYCLES(t_rrd, t_ckclk);
1666 t_rtp = TIME_2_CLOCK_CYCLES(t_rtp, t_ckclk);
1667 t_rfc = TIME_2_CLOCK_CYCLES(rfc_table[memory_size] * 1000, t_ckclk);
1668 t_mod = GET_MAX_VALUE(t_ckclk * 24, 15000);
1669 t_mod = TIME_2_CLOCK_CYCLES(t_mod, t_ckclk);
1670
1671 /* SDRAM Timing Low */
1672 val = (t_ras & 0xf) | (t_rcd << 4) | (t_rp << 8) | (t_wr << 12) |
1673 (t_wtr << 16) | (((t_ras & 0x30) >> 4) << 20) | (t_rrd << 24) |
1674 (t_rtp << 28);
1675 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1676 SDRAM_TIMING_LOW_REG, val, 0xff3fffff));
1677
1678 /* SDRAM Timing High */
1679 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1680 SDRAM_TIMING_HIGH_REG,
1681 t_rfc & 0x7f, 0x7f));
1682 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1683 SDRAM_TIMING_HIGH_REG,
1684 0x180, 0x180));
1685 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1686 SDRAM_TIMING_HIGH_REG,
1687 0x600, 0x600));
1688 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1689 SDRAM_TIMING_HIGH_REG,
1690 0x1800, 0xf800));
1691 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1692 SDRAM_TIMING_HIGH_REG,
1693 ((t_rfc & 0x380) >> 7) << 16, 0x70000));
1694 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1695 SDRAM_TIMING_HIGH_REG, 0,
1696 0x380000));
1697 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1698 SDRAM_TIMING_HIGH_REG,
1699 (t_mod & 0xf) << 25, 0x1e00000));
1700 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1701 SDRAM_TIMING_HIGH_REG,
1702 (t_mod >> 4) << 30, 0xc0000000));
1703 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1704 SDRAM_TIMING_HIGH_REG,
1705 0x16000000, 0x1e000000));
1706 CHECK_STATUS(ddr3_tip_if_write(dev_num, access_type, if_id,
1707 SDRAM_TIMING_HIGH_REG,
1708 0x40000000, 0xc0000000));
1709
1710 return MV_OK;
1711}
1712
1713/*
1714 * Mode Read
1715 */
1716int hws_ddr3_tip_mode_read(u32 dev_num, struct mode_info *mode_info)
1717{
1718 u32 ret;
1719
1720 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1721 MR0_REG, mode_info->reg_mr0, MASK_ALL_BITS);
1722 if (ret != MV_OK)
1723 return ret;
1724
1725 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1726 MR1_REG, mode_info->reg_mr1, MASK_ALL_BITS);
1727 if (ret != MV_OK)
1728 return ret;
1729
1730 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1731 MR2_REG, mode_info->reg_mr2, MASK_ALL_BITS);
1732 if (ret != MV_OK)
1733 return ret;
1734
1735 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1736 MR3_REG, mode_info->reg_mr2, MASK_ALL_BITS);
1737 if (ret != MV_OK)
1738 return ret;
1739
1740 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1741 READ_DATA_SAMPLE_DELAY, mode_info->read_data_sample,
1742 MASK_ALL_BITS);
1743 if (ret != MV_OK)
1744 return ret;
1745
1746 ret = ddr3_tip_if_read(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1747 READ_DATA_READY_DELAY, mode_info->read_data_ready,
1748 MASK_ALL_BITS);
1749 if (ret != MV_OK)
1750 return ret;
1751
1752 return MV_OK;
1753}
1754
1755/*
1756 * Get first active IF
1757 */
1758int ddr3_tip_get_first_active_if(u8 dev_num, u32 interface_mask,
1759 u32 *interface_id)
1760{
1761 u32 if_id;
1762 struct hws_topology_map *tm = ddr3_get_topology_map();
1763
1764 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1765 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1766 if (interface_mask & (1 << if_id)) {
1767 *interface_id = if_id;
1768 break;
1769 }
1770 }
1771
1772 return MV_OK;
1773}
1774
1775/*
1776 * Write CS Result
1777 */
1778int ddr3_tip_write_cs_result(u32 dev_num, u32 offset)
1779{
1780 u32 if_id, bus_num, cs_bitmask, data_val, cs_num;
1781 struct hws_topology_map *tm = ddr3_get_topology_map();
1782
1783 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1784 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1785 for (bus_num = 0; bus_num < tm->num_of_bus_per_interface;
1786 bus_num++) {
1787 VALIDATE_ACTIVE(tm->bus_act_mask, bus_num);
1788 cs_bitmask =
1789 tm->interface_params[if_id].
1790 as_bus_params[bus_num].cs_bitmask;
1791 if (cs_bitmask != effective_cs) {
1792 cs_num = GET_CS_FROM_MASK(cs_bitmask);
1793 ddr3_tip_bus_read(dev_num, if_id,
1794 ACCESS_TYPE_UNICAST, bus_num,
1795 DDR_PHY_DATA,
1796 offset +
1797 CS_REG_VALUE(effective_cs),
1798 &data_val);
1799 ddr3_tip_bus_write(dev_num,
1800 ACCESS_TYPE_UNICAST,
1801 if_id,
1802 ACCESS_TYPE_UNICAST,
1803 bus_num, DDR_PHY_DATA,
1804 offset +
1805 CS_REG_VALUE(cs_num),
1806 data_val);
1807 }
1808 }
1809 }
1810
1811 return MV_OK;
1812}
1813
1814/*
1815 * Write MRS - issue a mode-register-set command and poll for completion
1816 */
1817int ddr3_tip_write_mrs_cmd(u32 dev_num, u32 *cs_mask_arr, u32 cmd,
1818 u32 data, u32 mask)
1819{
1820 u32 if_id, reg;
1821 struct hws_topology_map *tm = ddr3_get_topology_map();
1822
1823 reg = (cmd == MRS1_CMD) ? MR1_REG : MR2_REG;
1824 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1825 PARAM_NOT_CARE, reg, data, mask));
1826 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1827 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1828 CHECK_STATUS(ddr3_tip_if_write
1829 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1830 SDRAM_OPERATION_REG,
1831 (cs_mask_arr[if_id] << 8) | cmd, 0xf1f));
1832 }
1833
1834 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1835 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1836 if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0,
1837 0x1f, SDRAM_OPERATION_REG,
1838 MAX_POLLING_ITERATIONS) != MV_OK) {
1839 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
1840 ("write_mrs_cmd: Poll cmd fail"));
1841 }
1842 }
1843
1844 return MV_OK;
1845}
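
/*
 * Illustrative sketch (cs_mask, mr1_data and mr1_mask are the caller's own
 * variables): cs_mask_arr holds one chip-select mask per interface, placed
 * into bits [11:8] of SDRAM_OPERATION_REG along with the command code in
 * the low bits:
 *
 *	u32 cs_mask[MAX_INTERFACE_NUM] = { 0 };
 *
 *	CHECK_STATUS(ddr3_tip_write_mrs_cmd(dev_num, cs_mask, MRS1_CMD,
 *					    mr1_data, mr1_mask));
 */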
1846
1847/*
1848 * Reset XSB Read FIFO
1849 */
1850int ddr3_tip_reset_fifo_ptr(u32 dev_num)
1851{
1852 u32 if_id = 0;
1853
1854 /* Configure PHY reset value to 0 in order to "clean" the FIFO */
1855 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1856 if_id, 0x15c8, 0, 0xff000000));
1857 /*
1858	 * Move the PHY to RL mode (the PHY overrides the FIFO values during a
1859	 * FIFO reset only in RL mode)
1860 */
1861 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1862 if_id, TRAINING_SW_2_REG,
1863 0x1, 0x9));
1864	/* Make the above configuration take effect in the PHY */
1865 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1866 if_id, 0x15b0,
1867 0x80000000, 0x80000000));
1868 /* Reset read fifo assertion */
1869 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1870 if_id, 0x1400, 0, 0x40000000));
1871 /* Reset read fifo deassertion */
1872 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1873 if_id, 0x1400,
1874 0x40000000, 0x40000000));
1875 /* Move PHY back to functional mode */
1876 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1877 if_id, TRAINING_SW_2_REG,
1878 0x8, 0x9));
1879 /* Stop training machine */
1880 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1881 if_id, 0x15b4, 0x10000, 0x10000));
1882
1883 return MV_OK;
1884}
1885
1886/*
1887 * Reset Phy registers
1888 */
1889int ddr3_tip_ddr3_reset_phy_regs(u32 dev_num)
1890{
1891 u32 if_id, phy_id, cs;
1892 struct hws_topology_map *tm = ddr3_get_topology_map();
1893
1894 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
1895 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1896 for (phy_id = 0; phy_id < tm->num_of_bus_per_interface;
1897 phy_id++) {
1898 VALIDATE_ACTIVE(tm->bus_act_mask, phy_id);
1899 CHECK_STATUS(ddr3_tip_bus_write
1900 (dev_num, ACCESS_TYPE_UNICAST,
1901 if_id, ACCESS_TYPE_UNICAST,
1902 phy_id, DDR_PHY_DATA,
1903 WL_PHY_REG +
1904 CS_REG_VALUE(effective_cs),
1905 phy_reg0_val));
1906 CHECK_STATUS(ddr3_tip_bus_write
1907 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1908 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1909 RL_PHY_REG + CS_REG_VALUE(effective_cs),
1910 phy_reg2_val));
1911 CHECK_STATUS(ddr3_tip_bus_write
1912 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1913 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1914 READ_CENTRALIZATION_PHY_REG +
1915 CS_REG_VALUE(effective_cs), phy_reg3_val));
1916 CHECK_STATUS(ddr3_tip_bus_write
1917 (dev_num, ACCESS_TYPE_UNICAST, if_id,
1918 ACCESS_TYPE_UNICAST, phy_id, DDR_PHY_DATA,
1919 WRITE_CENTRALIZATION_PHY_REG +
1920 CS_REG_VALUE(effective_cs), phy_reg3_val));
1921 }
1922 }
1923
1924 /* Set Receiver Calibration value */
1925 for (cs = 0; cs < MAX_CS_NUM; cs++) {
1926 /* PHY register 0xdb bits[5:0] - configure to 63 */
1927 CHECK_STATUS(ddr3_tip_bus_write
1928 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1929 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1930 DDR_PHY_DATA, CSN_IOB_VREF_REG(cs), 63));
1931 }
1932
1933 return MV_OK;
1934}
1935
1936/*
1937 * Restore Dunit registers
1938 */
1939int ddr3_tip_restore_dunit_regs(u32 dev_num)
1940{
1941 u32 index_cnt;
1942
1943 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1944 PARAM_NOT_CARE, CALIB_MACHINE_CTRL_REG,
1945 0x1, 0x1));
1946 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1947 PARAM_NOT_CARE, CALIB_MACHINE_CTRL_REG,
1948 calibration_update_control << 3,
1949 0x3 << 3));
1950 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST,
1951 PARAM_NOT_CARE,
1952 ODPG_WRITE_READ_MODE_ENABLE_REG,
1953 0xffff, MASK_ALL_BITS));
1954
1955 for (index_cnt = 0; index_cnt < ARRAY_SIZE(odpg_default_value);
1956 index_cnt++) {
1957 CHECK_STATUS(ddr3_tip_if_write
1958 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
1959 odpg_default_value[index_cnt].reg_addr,
1960 odpg_default_value[index_cnt].reg_data,
1961 odpg_default_value[index_cnt].reg_mask));
1962 }
1963
1964 return MV_OK;
1965}
1966
1967/*
1968 * Auto tune main flow
1969 */
1970static int ddr3_tip_ddr3_training_main_flow(u32 dev_num)
1971{
1972 enum hws_ddr_freq freq = init_freq;
1973 struct init_cntr_param init_cntr_prm;
1974 int ret = MV_OK;
1975 u32 if_id;
1976 u32 max_cs = hws_ddr3_tip_max_cs_get();
1977 struct hws_topology_map *tm = ddr3_get_topology_map();
1978
1979#ifndef EXCLUDE_SWITCH_DEBUG
1980 if (debug_training == DEBUG_LEVEL_TRACE) {
1981 CHECK_STATUS(print_device_info((u8)dev_num));
1982 }
1983#endif
1984
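	/*
	 * Overall flow: reset the PHY registers per CS, optionally run the
	 * PLL setup and ADLL calibration, then step through the stages gated
	 * by mask_tune_func: init controller, low frequency, pattern load,
	 * medium frequency, write/read leveling, PBS RX/TX, target frequency,
	 * leveling at the target frequency, VREF calibration, RX/TX
	 * centralization, and finally restore of the D-unit registers.
	 * When debug_mode is set, stage failures are logged and the flow
	 * continues instead of returning MV_FAIL.
	 */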
1985 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
1986 CHECK_STATUS(ddr3_tip_ddr3_reset_phy_regs(dev_num));
1987 }
1988	/* Set to 0 after each loop to avoid an illegal value being used later */
1989 effective_cs = 0;
1990
1991 freq = init_freq;
1992 if (is_pll_before_init != 0) {
1993 for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
1994 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
1995 config_func_info[dev_num].tip_set_freq_divider_func(
1996 (u8)dev_num, if_id, freq);
1997 }
1998 }
1999
2000 if (is_adll_calib_before_init != 0) {
2001 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2002 ("with adll calib before init\n"));
2003 adll_calibration(dev_num, ACCESS_TYPE_MULTICAST, 0, freq);
2004 }
2005
2006 if (is_reg_dump != 0) {
2007 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2008 ("Dump before init controller\n"));
2009 ddr3_tip_reg_dump(dev_num);
2010 }
2011
2012 if (mask_tune_func & INIT_CONTROLLER_MASK_BIT) {
2013 training_stage = INIT_CONTROLLER;
2014 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2015 ("INIT_CONTROLLER_MASK_BIT\n"));
2016 init_cntr_prm.do_mrs_phy = 1;
2017 init_cntr_prm.is_ctrl64_bit = 0;
2018 init_cntr_prm.init_phy = 1;
2019 init_cntr_prm.msys_init = 0;
2020 ret = hws_ddr3_tip_init_controller(dev_num, &init_cntr_prm);
2021 if (is_reg_dump != 0)
2022 ddr3_tip_reg_dump(dev_num);
2023 if (ret != MV_OK) {
2024 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2025 ("hws_ddr3_tip_init_controller failure\n"));
2026 if (debug_mode == 0)
2027 return MV_FAIL;
2028 }
2029 }
2030
2031#ifdef STATIC_ALGO_SUPPORT
2032 if (mask_tune_func & STATIC_LEVELING_MASK_BIT) {
2033 training_stage = STATIC_LEVELING;
2034 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2035 ("STATIC_LEVELING_MASK_BIT\n"));
2036 ret = ddr3_tip_run_static_alg(dev_num, freq);
2037 if (is_reg_dump != 0)
2038 ddr3_tip_reg_dump(dev_num);
2039 if (ret != MV_OK) {
2040 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2041 ("ddr3_tip_run_static_alg failure\n"));
2042 if (debug_mode == 0)
2043 return MV_FAIL;
2044 }
2045 }
2046#endif
2047
2048 if (mask_tune_func & SET_LOW_FREQ_MASK_BIT) {
2049 training_stage = SET_LOW_FREQ;
2050 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2051 ("SET_LOW_FREQ_MASK_BIT %d\n",
2052 freq_val[low_freq]));
2053 ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
2054 PARAM_NOT_CARE, low_freq);
2055 if (is_reg_dump != 0)
2056 ddr3_tip_reg_dump(dev_num);
2057 if (ret != MV_OK) {
2058 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2059 ("ddr3_tip_freq_set failure\n"));
2060 if (debug_mode == 0)
2061 return MV_FAIL;
2062 }
2063 }
2064
2065 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2066 if (mask_tune_func & LOAD_PATTERN_MASK_BIT) {
2067 training_stage = LOAD_PATTERN;
2068 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2069 ("LOAD_PATTERN_MASK_BIT #%d\n",
2070 effective_cs));
2071 ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
2072 if (is_reg_dump != 0)
2073 ddr3_tip_reg_dump(dev_num);
2074 if (ret != MV_OK) {
2075 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2076 ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
2077 effective_cs));
2078 if (debug_mode == 0)
2079 return MV_FAIL;
2080 }
2081 }
2082 }
2083	/* Set to 0 after each loop to avoid an illegal value being used later */
2084 effective_cs = 0;
2085
2086 if (mask_tune_func & SET_MEDIUM_FREQ_MASK_BIT) {
2087 training_stage = SET_MEDIUM_FREQ;
2088 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2089 ("SET_MEDIUM_FREQ_MASK_BIT %d\n",
2090 freq_val[medium_freq]));
2091 ret =
2092 ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
2093 PARAM_NOT_CARE, medium_freq);
2094 if (is_reg_dump != 0)
2095 ddr3_tip_reg_dump(dev_num);
2096 if (ret != MV_OK) {
2097 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2098 ("ddr3_tip_freq_set failure\n"));
2099 if (debug_mode == 0)
2100 return MV_FAIL;
2101 }
2102 }
2103
2104 if (mask_tune_func & WRITE_LEVELING_MASK_BIT) {
2105 training_stage = WRITE_LEVELING;
2106 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2107 ("WRITE_LEVELING_MASK_BIT\n"));
2108 if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) {
2109 ret = ddr3_tip_dynamic_write_leveling(dev_num);
2110 } else {
2111 /* Use old WL */
2112 ret = ddr3_tip_legacy_dynamic_write_leveling(dev_num);
2113 }
2114
2115 if (is_reg_dump != 0)
2116 ddr3_tip_reg_dump(dev_num);
2117 if (ret != MV_OK) {
2118 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2119 ("ddr3_tip_dynamic_write_leveling failure\n"));
2120 if (debug_mode == 0)
2121 return MV_FAIL;
2122 }
2123 }
2124
2125 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2126 if (mask_tune_func & LOAD_PATTERN_2_MASK_BIT) {
2127 training_stage = LOAD_PATTERN_2;
2128 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2129 ("LOAD_PATTERN_2_MASK_BIT CS #%d\n",
2130 effective_cs));
2131 ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
2132 if (is_reg_dump != 0)
2133 ddr3_tip_reg_dump(dev_num);
2134 if (ret != MV_OK) {
2135 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2136 ("ddr3_tip_load_all_pattern_to_mem failure CS #%d\n",
2137 effective_cs));
2138 if (debug_mode == 0)
2139 return MV_FAIL;
2140 }
2141 }
2142 }
2143	/* Set to 0 after each loop to avoid an illegal value being used later */
2144 effective_cs = 0;
2145
2146 if (mask_tune_func & READ_LEVELING_MASK_BIT) {
2147 training_stage = READ_LEVELING;
2148 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2149 ("READ_LEVELING_MASK_BIT\n"));
2150 if ((rl_mid_freq_wa == 0) || (freq_val[medium_freq] == 533)) {
2151 ret = ddr3_tip_dynamic_read_leveling(dev_num, medium_freq);
2152 } else {
2153 /* Use old RL */
2154 ret = ddr3_tip_legacy_dynamic_read_leveling(dev_num);
2155 }
2156
2157 if (is_reg_dump != 0)
2158 ddr3_tip_reg_dump(dev_num);
2159 if (ret != MV_OK) {
2160 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2161 ("ddr3_tip_dynamic_read_leveling failure\n"));
2162 if (debug_mode == 0)
2163 return MV_FAIL;
2164 }
2165 }
2166
2167 if (mask_tune_func & WRITE_LEVELING_SUPP_MASK_BIT) {
2168 training_stage = WRITE_LEVELING_SUPP;
2169 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2170 ("WRITE_LEVELING_SUPP_MASK_BIT\n"));
2171 ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
2172 if (is_reg_dump != 0)
2173 ddr3_tip_reg_dump(dev_num);
2174 if (ret != MV_OK) {
2175 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2176 ("ddr3_tip_dynamic_write_leveling_supp failure\n"));
2177 if (debug_mode == 0)
2178 return MV_FAIL;
2179 }
2180 }
2181
2182 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2183 if (mask_tune_func & PBS_RX_MASK_BIT) {
2184 training_stage = PBS_RX;
2185 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2186 ("PBS_RX_MASK_BIT CS #%d\n",
2187 effective_cs));
2188 ret = ddr3_tip_pbs_rx(dev_num);
2189 if (is_reg_dump != 0)
2190 ddr3_tip_reg_dump(dev_num);
2191 if (ret != MV_OK) {
2192 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2193 ("ddr3_tip_pbs_rx failure CS #%d\n",
2194 effective_cs));
2195 if (debug_mode == 0)
2196 return MV_FAIL;
2197 }
2198 }
2199 }
2200
2201 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2202 if (mask_tune_func & PBS_TX_MASK_BIT) {
2203 training_stage = PBS_TX;
2204 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2205 ("PBS_TX_MASK_BIT CS #%d\n",
2206 effective_cs));
2207 ret = ddr3_tip_pbs_tx(dev_num);
2208 if (is_reg_dump != 0)
2209 ddr3_tip_reg_dump(dev_num);
2210 if (ret != MV_OK) {
2211 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2212 ("ddr3_tip_pbs_tx failure CS #%d\n",
2213 effective_cs));
2214 if (debug_mode == 0)
2215 return MV_FAIL;
2216 }
2217 }
2218 }
2219	/* Set to 0 after each loop to avoid an illegal value being used later */
2220 effective_cs = 0;
2221
2222 if (mask_tune_func & SET_TARGET_FREQ_MASK_BIT) {
2223 training_stage = SET_TARGET_FREQ;
2224 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2225 ("SET_TARGET_FREQ_MASK_BIT %d\n",
2226 freq_val[tm->
2227 interface_params[first_active_if].
2228 memory_freq]));
2229 ret = ddr3_tip_freq_set(dev_num, ACCESS_TYPE_MULTICAST,
2230 PARAM_NOT_CARE,
2231 tm->interface_params[first_active_if].
2232 memory_freq);
2233 if (is_reg_dump != 0)
2234 ddr3_tip_reg_dump(dev_num);
2235 if (ret != MV_OK) {
2236 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2237 ("ddr3_tip_freq_set failure\n"));
2238 if (debug_mode == 0)
2239 return MV_FAIL;
2240 }
2241 }
2242
2243 if (mask_tune_func & WRITE_LEVELING_TF_MASK_BIT) {
2244 training_stage = WRITE_LEVELING_TF;
2245 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2246 ("WRITE_LEVELING_TF_MASK_BIT\n"));
2247 ret = ddr3_tip_dynamic_write_leveling(dev_num);
2248 if (is_reg_dump != 0)
2249 ddr3_tip_reg_dump(dev_num);
2250 if (ret != MV_OK) {
2251 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2252 ("ddr3_tip_dynamic_write_leveling TF failure\n"));
2253 if (debug_mode == 0)
2254 return MV_FAIL;
2255 }
2256 }
2257
2258 if (mask_tune_func & LOAD_PATTERN_HIGH_MASK_BIT) {
2259 training_stage = LOAD_PATTERN_HIGH;
2260 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("LOAD_PATTERN_HIGH\n"));
2261 ret = ddr3_tip_load_all_pattern_to_mem(dev_num);
2262 if (is_reg_dump != 0)
2263 ddr3_tip_reg_dump(dev_num);
2264 if (ret != MV_OK) {
2265 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2266 ("ddr3_tip_load_all_pattern_to_mem failure\n"));
2267 if (debug_mode == 0)
2268 return MV_FAIL;
2269 }
2270 }
2271
2272 if (mask_tune_func & READ_LEVELING_TF_MASK_BIT) {
2273 training_stage = READ_LEVELING_TF;
2274 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2275 ("READ_LEVELING_TF_MASK_BIT\n"));
2276 ret = ddr3_tip_dynamic_read_leveling(dev_num, tm->
2277 interface_params[first_active_if].
2278 memory_freq);
2279 if (is_reg_dump != 0)
2280 ddr3_tip_reg_dump(dev_num);
2281 if (ret != MV_OK) {
2282 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2283 ("ddr3_tip_dynamic_read_leveling TF failure\n"));
2284 if (debug_mode == 0)
2285 return MV_FAIL;
2286 }
2287 }
2288
2289 if (mask_tune_func & DM_PBS_TX_MASK_BIT) {
2290 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DM_PBS_TX_MASK_BIT\n"));
2291 }
2292
2293 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2294 if (mask_tune_func & VREF_CALIBRATION_MASK_BIT) {
2295 training_stage = VREF_CALIBRATION;
2296 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("VREF\n"));
2297 ret = ddr3_tip_vref(dev_num);
2298 if (is_reg_dump != 0) {
2299 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2300 ("VREF Dump\n"));
2301 ddr3_tip_reg_dump(dev_num);
2302 }
2303 if (ret != MV_OK) {
2304 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2305 ("ddr3_tip_vref failure\n"));
2306 if (debug_mode == 0)
2307 return MV_FAIL;
2308 }
2309 }
2310 }
2311	/* Set to 0 after each loop to avoid an illegal value being used later */
2312 effective_cs = 0;
2313
2314 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2315 if (mask_tune_func & CENTRALIZATION_RX_MASK_BIT) {
2316 training_stage = CENTRALIZATION_RX;
2317 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2318 ("CENTRALIZATION_RX_MASK_BIT CS #%d\n",
2319 effective_cs));
2320 ret = ddr3_tip_centralization_rx(dev_num);
2321 if (is_reg_dump != 0)
2322 ddr3_tip_reg_dump(dev_num);
2323 if (ret != MV_OK) {
2324 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2325 ("ddr3_tip_centralization_rx failure CS #%d\n",
2326 effective_cs));
2327 if (debug_mode == 0)
2328 return MV_FAIL;
2329 }
2330 }
2331 }
2332	/* Set to 0 after each loop to avoid an illegal value being used later */
2333 effective_cs = 0;
2334
2335 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2336 if (mask_tune_func & WRITE_LEVELING_SUPP_TF_MASK_BIT) {
2337 training_stage = WRITE_LEVELING_SUPP_TF;
2338 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2339 ("WRITE_LEVELING_SUPP_TF_MASK_BIT CS #%d\n",
2340 effective_cs));
2341 ret = ddr3_tip_dynamic_write_leveling_supp(dev_num);
2342 if (is_reg_dump != 0)
2343 ddr3_tip_reg_dump(dev_num);
2344 if (ret != MV_OK) {
2345 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2346 ("ddr3_tip_dynamic_write_leveling_supp TF failure CS #%d\n",
2347 effective_cs));
2348 if (debug_mode == 0)
2349 return MV_FAIL;
2350 }
2351 }
2352 }
2353	/* Set to 0 after each loop to avoid an illegal value being used later */
2354 effective_cs = 0;
2355
2356 for (effective_cs = 0; effective_cs < max_cs; effective_cs++) {
2357 if (mask_tune_func & CENTRALIZATION_TX_MASK_BIT) {
2358 training_stage = CENTRALIZATION_TX;
2359 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2360 ("CENTRALIZATION_TX_MASK_BIT CS #%d\n",
2361 effective_cs));
2362 ret = ddr3_tip_centralization_tx(dev_num);
2363 if (is_reg_dump != 0)
2364 ddr3_tip_reg_dump(dev_num);
2365 if (ret != MV_OK) {
2366 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2367 ("ddr3_tip_centralization_tx failure CS #%d\n",
2368 effective_cs));
2369 if (debug_mode == 0)
2370 return MV_FAIL;
2371 }
2372 }
2373 }
2374	/* Set to 0 after each loop to avoid an illegal value being used later */
2375 effective_cs = 0;
2376
2377 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("restore registers to default\n"));
2378 /* restore register values */
2379 CHECK_STATUS(ddr3_tip_restore_dunit_regs(dev_num));
2380
2381 if (is_reg_dump != 0)
2382 ddr3_tip_reg_dump(dev_num);
2383
2384 return MV_OK;
2385}
2386
2387/*
2388 * DDR3 Dynamic training flow
2389 */
2390static int ddr3_tip_ddr3_auto_tune(u32 dev_num)
2391{
2392 u32 if_id, stage, ret;
2393 int is_if_fail = 0, is_auto_tune_fail = 0;
2394
2395 training_stage = INIT_CONTROLLER;
2396
2397 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
2398 for (stage = 0; stage < MAX_STAGE_LIMIT; stage++)
2399 training_result[stage][if_id] = NO_TEST_DONE;
2400 }
2401
2402 ret = ddr3_tip_ddr3_training_main_flow(dev_num);
2403
2404 /* activate XSB test */
2405 if (xsb_validate_type != 0) {
2406 run_xsb_test(dev_num, xsb_validation_base_address, 1, 1,
2407 0x1024);
2408 }
2409
2410 if (is_reg_dump != 0)
2411 ddr3_tip_reg_dump(dev_num);
2412
2413 /* print log */
2414 CHECK_STATUS(ddr3_tip_print_log(dev_num, window_mem_addr));
2415
2416 if (ret != MV_OK) {
2417 CHECK_STATUS(ddr3_tip_print_stability_log(dev_num));
2418 }
2419
2420 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
2421 is_if_fail = 0;
2422 for (stage = 0; stage < MAX_STAGE_LIMIT; stage++) {
2423 if (training_result[stage][if_id] == TEST_FAILED)
2424 is_if_fail = 1;
2425 }
2426 if (is_if_fail == 1) {
2427 is_auto_tune_fail = 1;
2428 DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
2429 ("Auto Tune failed for IF %d\n",
2430 if_id));
2431 }
2432 }
2433
2434 if ((ret == MV_FAIL) || (is_auto_tune_fail == 1))
2435 return MV_FAIL;
2436 else
2437 return MV_OK;
2438}
2439
2440/*
2441 * Enable init sequence
2442 */
2443int ddr3_tip_enable_init_sequence(u32 dev_num)
2444{
2445 int is_fail = 0;
2446 u32 if_id = 0, mem_mask = 0, bus_index = 0;
2447 struct hws_topology_map *tm = ddr3_get_topology_map();
2448
2449 /* Enable init sequence */
2450 CHECK_STATUS(ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, 0,
2451 SDRAM_INIT_CONTROL_REG, 0x1, 0x1));
2452
2453 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
2454 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
2455
2456 if (ddr3_tip_if_polling
2457 (dev_num, ACCESS_TYPE_UNICAST, if_id, 0, 0x1,
2458 SDRAM_INIT_CONTROL_REG,
2459 MAX_POLLING_ITERATIONS) != MV_OK) {
2460 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2461 ("polling failed IF %d\n",
2462 if_id));
2463 is_fail = 1;
2464 continue;
2465 }
2466
2467 mem_mask = 0;
2468 for (bus_index = 0; bus_index < GET_TOPOLOGY_NUM_OF_BUSES();
2469 bus_index++) {
2470 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
2471 mem_mask |=
2472 tm->interface_params[if_id].
2473 as_bus_params[bus_index].mirror_enable_bitmask;
2474 }
2475
2476 if (mem_mask != 0) {
2477 /* Disable Multi CS */
2478 CHECK_STATUS(ddr3_tip_if_write
2479 (dev_num, ACCESS_TYPE_MULTICAST,
2480 if_id, CS_ENABLE_REG, 1 << 3,
2481 1 << 3));
2482 }
2483 }
2484
2485 return (is_fail == 0) ? MV_OK : MV_FAIL;
2486}
2487
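/*
 * Register the DQ mapping table; only the pointer is stored in
 * dq_map_table, so the caller's table must remain valid for as long as the
 * training code uses it.
 */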
2488int ddr3_tip_register_dq_table(u32 dev_num, u32 *table)
2489{
2490 dq_map_table = table;
2491
2492 return MV_OK;
2493}
2494
2495/*
2496 * Check if the PUP search result is locked (per-bit mode checks every bit)
2497 */
2498int ddr3_tip_is_pup_lock(u32 *pup_buf, enum hws_training_result read_mode)
2499{
2500 u32 bit_start = 0, bit_end = 0, bit_id;
2501
2502 if (read_mode == RESULT_PER_BIT) {
2503 bit_start = 0;
2504 bit_end = BUS_WIDTH_IN_BITS - 1;
2505 } else {
2506 bit_start = 0;
2507 bit_end = 0;
2508 }
2509
2510 for (bit_id = bit_start; bit_id <= bit_end; bit_id++) {
2511 if (GET_LOCK_RESULT(pup_buf[bit_id]) == 0)
2512 return 0;
2513 }
2514
2515 return 1;
2516}
2517
2518/*
2519 * Get minimum buffer value
2520 */
2521u8 ddr3_tip_get_buf_min(u8 *buf_ptr)
2522{
2523 u8 min_val = 0xff;
2524 u8 cnt = 0;
2525
2526 for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
2527 if (buf_ptr[cnt] < min_val)
2528 min_val = buf_ptr[cnt];
2529 }
2530
2531 return min_val;
2532}
2533
2534/*
2535 * Get maximum buffer value
2536 */
2537u8 ddr3_tip_get_buf_max(u8 *buf_ptr)
2538{
2539 u8 max_val = 0;
2540 u8 cnt = 0;
2541
2542 for (cnt = 0; cnt < BUS_WIDTH_IN_BITS; cnt++) {
2543 if (buf_ptr[cnt] > max_val)
2544 max_val = buf_ptr[cnt];
2545 }
2546
2547 return max_val;
2548}
2549
2550/*
2551 * The following functions return memory parameters:
2552 * bus and device width, device size
2553 */
2554
2555u32 hws_ddr3_get_bus_width(void)
2556{
2557 struct hws_topology_map *tm = ddr3_get_topology_map();
2558
2559 return (DDR3_IS_16BIT_DRAM_MODE(tm->bus_act_mask) ==
2560 1) ? 16 : 32;
2561}
2562
2563u32 hws_ddr3_get_device_width(u32 if_id)
2564{
2565 struct hws_topology_map *tm = ddr3_get_topology_map();
2566
2567 return (tm->interface_params[if_id].bus_width ==
2568 BUS_WIDTH_8) ? 8 : 16;
2569}
2570
2571u32 hws_ddr3_get_device_size(u32 if_id)
2572{
2573 struct hws_topology_map *tm = ddr3_get_topology_map();
2574
2575 if (tm->interface_params[if_id].memory_size >=
2576 MEM_SIZE_LAST) {
2577 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2578 ("Error: Wrong device size of Cs: %d",
2579 tm->interface_params[if_id].memory_size));
2580 return 0;
2581 } else {
2582 return 1 << tm->interface_params[if_id].memory_size;
2583 }
2584}
2585
2586int hws_ddr3_calc_mem_cs_size(u32 if_id, u32 cs, u32 *cs_size)
2587{
2588 u32 cs_mem_size, dev_size;
2589
2590 dev_size = hws_ddr3_get_device_size(if_id);
2591 if (dev_size != 0) {
2592 cs_mem_size = ((hws_ddr3_get_bus_width() /
2593 hws_ddr3_get_device_width(if_id)) * dev_size);
2594
2595		/* The result is the CS size in GBytes x 16, to avoid floating point */
2596
2597 if (cs_mem_size == 2) {
2598 *cs_size = _128M;
2599 } else if (cs_mem_size == 4) {
2600 *cs_size = _256M;
2601 } else if (cs_mem_size == 8) {
2602 *cs_size = _512M;
2603 } else if (cs_mem_size == 16) {
2604 *cs_size = _1G;
2605 } else if (cs_mem_size == 32) {
2606 *cs_size = _2G;
2607 } else {
2608 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2609 ("Error: Wrong Memory size of Cs: %d", cs));
2610 return MV_FAIL;
2611 }
2612 return MV_OK;
2613 } else {
2614 return MV_FAIL;
2615 }
2616}
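
/*
 * Worked example (assuming the memory_size enum makes 1 << memory_size the
 * per-device capacity in GByte/16 units, as hws_ddr3_get_device_size
 * implies): a 32-bit bus built from 16-bit, 4 Gbit devices gives
 * dev_size = 8, so cs_mem_size = (32 / 16) * 8 = 16 and *cs_size = _1G.
 */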
2617
2618int hws_ddr3_cs_base_adr_calc(u32 if_id, u32 cs, u32 *cs_base_addr)
2619{
2620 u32 cs_mem_size = 0;
2621#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
2622 u32 physical_mem_size;
2623 u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
2624#endif
2625
2626 if (hws_ddr3_calc_mem_cs_size(if_id, cs, &cs_mem_size) != MV_OK)
2627 return MV_FAIL;
2628
2629#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
2630 struct hws_topology_map *tm = ddr3_get_topology_map();
2631 /*
2632	 * If the number of address pins doesn't allow using the maximum
2633	 * memory size defined in the topology, the memory size is limited
2634	 * to DEVICE_MAX_DRAM_ADDRESS_SIZE
2635 */
2636 physical_mem_size =
2637 mv_hwsmem_size[tm->interface_params[0].memory_size];
2638
2639 if (hws_ddr3_get_device_width(cs) == 16) {
2640 /*
2641		 * A 16-bit memory device can be twice as large - the least
2642		 * significant address pin is not needed
2643 */
2644 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
2645 }
2646
2647 if (physical_mem_size > max_mem_size) {
2648 cs_mem_size = max_mem_size *
2649 (hws_ddr3_get_bus_width() /
2650 hws_ddr3_get_device_width(if_id));
2651 DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
2652 ("Updated Physical Mem size is from 0x%x to %x\n",
2653 physical_mem_size,
2654 DEVICE_MAX_DRAM_ADDRESS_SIZE));
2655 }
2656#endif
2657
2658 /* calculate CS base addr */
2659 *cs_base_addr = ((cs_mem_size) * cs) & 0xffff0000;
2660
2661 return MV_OK;
2662}
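
/*
 * Example: with 512 MB per chip-select, CS 1 gets a base address of
 * cs_mem_size * 1 rounded down to a 64 KB boundary, i.e. 0x20000000 if
 * _512M is defined as 0x20000000.
 */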