blob: b73bbf4f1b0d290516d72aff93ab6b556c7b0ae4 [file] [log] [blame]
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier: GPL-2.0
 */
6
7#include <common.h>
8#include <spl.h>
9#include <asm/io.h>
10#include <asm/arch/cpu.h>
11#include <asm/arch/soc.h>
12
13#include "ddr3_init.h"
14
/*
 * Design Guidelines parameters
 *
 * PHY pad calibration and ODT defaults used by ddr3_tip_configure_phy().
 * Drive strengths / ODT values are in ohms; the remaining values are raw
 * register encodings.
 */
u32 g_zpri_data = 123; /* controller data - P drive strength */
u32 g_znri_data = 123; /* controller data - N drive strength */
u32 g_zpri_ctrl = 74; /* controller C/A - P drive strength */
u32 g_znri_ctrl = 74; /* controller C/A - N drive strength */
u32 g_zpodt_data = 45; /* controller data - P ODT */
u32 g_znodt_data = 45; /* controller data - N ODT */
u32 g_zpodt_ctrl = 45; /* controller C/A - P ODT (comment was mislabeled "data") */
u32 g_znodt_ctrl = 45; /* controller C/A - N ODT (comment was mislabeled "data") */
u32 g_odt_config_2cs = 0x120012; /* ODT config register value, two chip-selects */
u32 g_odt_config_1cs = 0x10000; /* ODT config register value, one chip-select */
u32 g_rtt_nom = 0x44; /* NOTE(review): presumably MR1 RTT_Nom encoding - confirm */
u32 g_dic = 0x2; /* NOTE(review): presumably output driver impedance control - confirm */
28
#ifdef STATIC_ALGO_SUPPORT

#define PARAM_NOT_CARE 0
#define MAX_STATIC_SEQ 48

/* Per-device silicon delay, registered by ddr3_tip_init_static_config_db()
 * and added into the round-trip delay sum (psec domain). */
u32 silicon_delay[HWS_MAX_DEVICE_NUM];
/* Per-device board/package trace-delay tables (static config DB). */
struct hws_tip_static_config_info static_config[HWS_MAX_DEVICE_NUM];
/* Per-device register init sequence; terminated by an entry with
 * reg_addr == 0 (see ddr3_tip_static_init_controller()). */
static reg_data *static_init_controller_config[HWS_MAX_DEVICE_NUM];

/* debug delay in write leveling */
int wl_debug_delay = 0;
/* pup register #3 for functional board */
int function_reg_value = 8;
u32 silicon;

/* Extra read-ready delay added per max DQS phase index 0..7. */
u32 read_ready_delay_phase_offset[] = { 4, 4, 4, 4, 6, 6, 6, 6 };

/*
 * Indexed by a 4-bit chip-select bitmask; yields the (single) CS number
 * and the count of chip-selects set in the mask.
 * NOTE(review): cs_num is only meaningful for single-bit masks - the
 * lookup at the read-leveling site uses .cs_num, so confirm callers only
 * pass single-CS bitmasks.
 */
static struct cs_element chip_select_map[] = {
	/* CS Value (single only)	Num_CS */
	{0, 0},
	{0, 1},
	{1, 1},
	{0, 2},
	{2, 1},
	{0, 2},
	{0, 2},
	{0, 3},
	{3, 1},
	{0, 2},
	{0, 2},
	{0, 3},
	{0, 2},
	{0, 3},
	{0, 3},
	{0, 4}
};
65
66/*
67 * Register static init controller DB
68 */
69int ddr3_tip_init_specific_reg_config(u32 dev_num, reg_data *reg_config_arr)
70{
71 static_init_controller_config[dev_num] = reg_config_arr;
72 return MV_OK;
73}
74
75/*
76 * Register static info DB
77 */
78int ddr3_tip_init_static_config_db(
79 u32 dev_num, struct hws_tip_static_config_info *static_config_info)
80{
81 static_config[dev_num].board_trace_arr =
82 static_config_info->board_trace_arr;
83 static_config[dev_num].package_trace_arr =
84 static_config_info->package_trace_arr;
85 silicon_delay[dev_num] = static_config_info->silicon_delay;
86
87 return MV_OK;
88}
89
90/*
91 * Static round trip flow - Calculates the total round trip delay.
92 */
93int ddr3_tip_static_round_trip_arr_build(u32 dev_num,
94 struct trip_delay_element *table_ptr,
95 int is_wl, u32 *round_trip_delay_arr)
96{
97 u32 bus_index, global_bus;
98 u32 if_id;
99 u32 bus_per_interface;
100 int sign;
101 u32 temp;
102 u32 board_trace;
103 struct trip_delay_element *pkg_delay_ptr;
104 struct hws_topology_map *tm = ddr3_get_topology_map();
105
106 /*
107 * In WL we calc the diff between Clock to DQs in RL we sum the round
108 * trip of Clock and DQs
109 */
110 sign = (is_wl) ? -1 : 1;
111
112 bus_per_interface = GET_TOPOLOGY_NUM_OF_BUSES();
113
114 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
115 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
116 for (bus_index = 0; bus_index < bus_per_interface;
117 bus_index++) {
118 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
119 global_bus = (if_id * bus_per_interface) + bus_index;
120
121 /* calculate total trip delay (package and board) */
122 board_trace = (table_ptr[global_bus].dqs_delay * sign) +
123 table_ptr[global_bus].ck_delay;
124 temp = (board_trace * 163) / 1000;
125
126 /* Convert the length to delay in psec units */
127 pkg_delay_ptr =
128 static_config[dev_num].package_trace_arr;
129 round_trip_delay_arr[global_bus] = temp +
130 (int)(pkg_delay_ptr[global_bus].dqs_delay *
131 sign) +
132 (int)pkg_delay_ptr[global_bus].ck_delay +
133 (int)((is_wl == 1) ? wl_debug_delay :
134 (int)silicon_delay[dev_num]);
135 DEBUG_TRAINING_STATIC_IP(
136 DEBUG_LEVEL_TRACE,
137 ("Round Trip Build round_trip_delay_arr[0x%x]: 0x%x temp 0x%x\n",
138 global_bus, round_trip_delay_arr[global_bus],
139 temp));
140 }
141 }
142
143 return MV_OK;
144}
145
146/*
147 * Write leveling for static flow - calculating the round trip delay of the
148 * DQS signal.
149 */
150int ddr3_tip_write_leveling_static_config(u32 dev_num, u32 if_id,
151 enum hws_ddr_freq frequency,
152 u32 *round_trip_delay_arr)
153{
154 u32 bus_index; /* index to the bus loop */
155 u32 bus_start_index;
156 u32 bus_per_interface;
157 u32 phase = 0;
158 u32 adll = 0, adll_cen, adll_inv, adll_final;
159 u32 adll_period = MEGA / freq_val[frequency] / 64;
160
161 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
162 ("ddr3_tip_write_leveling_static_config\n"));
163 DEBUG_TRAINING_STATIC_IP(
164 DEBUG_LEVEL_TRACE,
165 ("dev_num 0x%x IF 0x%x freq %d (adll_period 0x%x)\n",
166 dev_num, if_id, frequency, adll_period));
167
168 bus_per_interface = GET_TOPOLOGY_NUM_OF_BUSES();
169 bus_start_index = if_id * bus_per_interface;
170 for (bus_index = bus_start_index;
171 bus_index < (bus_start_index + bus_per_interface); bus_index++) {
172 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
173 phase = round_trip_delay_arr[bus_index] / (32 * adll_period);
174 adll = (round_trip_delay_arr[bus_index] -
175 (phase * 32 * adll_period)) / adll_period;
176 adll = (adll > 31) ? 31 : adll;
177 adll_cen = 16 + adll;
178 adll_inv = adll_cen / 32;
179 adll_final = adll_cen - (adll_inv * 32);
180 adll_final = (adll_final > 31) ? 31 : adll_final;
181
182 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
183 ("\t%d - phase 0x%x adll 0x%x\n",
184 bus_index, phase, adll));
185 /*
186 * Writing to all 4 phy of Interface number,
187 * bit 0 \96 4 \96 ADLL, bit 6-8 phase
188 */
189 CHECK_STATUS(ddr3_tip_bus_read_modify_write
190 (dev_num, ACCESS_TYPE_UNICAST, if_id,
191 (bus_index % 4), DDR_PHY_DATA,
192 PHY_WRITE_DELAY(cs),
193 ((phase << 6) + (adll & 0x1f)), 0x1df));
194 CHECK_STATUS(ddr3_tip_bus_write
195 (dev_num, ACCESS_TYPE_UNICAST, if_id,
196 ACCESS_TYPE_UNICAST, (bus_index % 4),
197 DDR_PHY_DATA, WRITE_CENTRALIZATION_PHY_REG,
198 ((adll_inv & 0x1) << 5) + adll_final));
199 }
200
201 return MV_OK;
202}
203
204/*
205 * Read leveling for static flow
206 */
207int ddr3_tip_read_leveling_static_config(u32 dev_num,
208 u32 if_id,
209 enum hws_ddr_freq frequency,
210 u32 *total_round_trip_delay_arr)
211{
212 u32 cs, data0, data1, data3 = 0;
213 u32 bus_index; /* index to the bus loop */
214 u32 bus_start_index;
215 u32 phase0, phase1, max_phase;
216 u32 adll0, adll1;
217 u32 cl_value;
218 u32 min_delay;
219 u32 sdr_period = MEGA / freq_val[frequency];
220 u32 ddr_period = MEGA / freq_val[frequency] / 2;
221 u32 adll_period = MEGA / freq_val[frequency] / 64;
222 enum hws_speed_bin speed_bin_index;
223 u32 rd_sample_dly[MAX_CS_NUM] = { 0 };
224 u32 rd_ready_del[MAX_CS_NUM] = { 0 };
225 u32 bus_per_interface = GET_TOPOLOGY_NUM_OF_BUSES();
226 struct hws_topology_map *tm = ddr3_get_topology_map();
227
228 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
229 ("ddr3_tip_read_leveling_static_config\n"));
230 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
231 ("dev_num 0x%x ifc 0x%x freq %d\n", dev_num,
232 if_id, frequency));
233 DEBUG_TRAINING_STATIC_IP(
234 DEBUG_LEVEL_TRACE,
235 ("Sdr_period 0x%x Ddr_period 0x%x adll_period 0x%x\n",
236 sdr_period, ddr_period, adll_period));
237
238 if (tm->interface_params[first_active_if].memory_freq ==
239 frequency) {
240 cl_value = tm->interface_params[first_active_if].cas_l;
241 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
242 ("cl_value 0x%x\n", cl_value));
243 } else {
244 speed_bin_index = tm->interface_params[if_id].speed_bin_index;
245 cl_value = cas_latency_table[speed_bin_index].cl_val[frequency];
246 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
247 ("cl_value 0x%x speed_bin_index %d\n",
248 cl_value, speed_bin_index));
249 }
250
251 bus_start_index = if_id * bus_per_interface;
252
253 for (bus_index = bus_start_index;
254 bus_index < (bus_start_index + bus_per_interface);
255 bus_index += 2) {
256 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
257 cs = chip_select_map[
258 tm->interface_params[if_id].as_bus_params[
259 (bus_index % 4)].cs_bitmask].cs_num;
260
261 /* read sample delay calculation */
262 min_delay = (total_round_trip_delay_arr[bus_index] <
263 total_round_trip_delay_arr[bus_index + 1]) ?
264 total_round_trip_delay_arr[bus_index] :
265 total_round_trip_delay_arr[bus_index + 1];
266 /* round down */
267 rd_sample_dly[cs] = 2 * (min_delay / (sdr_period * 2));
268 DEBUG_TRAINING_STATIC_IP(
269 DEBUG_LEVEL_TRACE,
270 ("\t%d - min_delay 0x%x cs 0x%x rd_sample_dly[cs] 0x%x\n",
271 bus_index, min_delay, cs, rd_sample_dly[cs]));
272
273 /* phase calculation */
274 phase0 = (total_round_trip_delay_arr[bus_index] -
275 (sdr_period * rd_sample_dly[cs])) / (ddr_period);
276 phase1 = (total_round_trip_delay_arr[bus_index + 1] -
277 (sdr_period * rd_sample_dly[cs])) / (ddr_period);
278 max_phase = (phase0 > phase1) ? phase0 : phase1;
279 DEBUG_TRAINING_STATIC_IP(
280 DEBUG_LEVEL_TRACE,
281 ("\tphase0 0x%x phase1 0x%x max_phase 0x%x\n",
282 phase0, phase1, max_phase));
283
284 /* ADLL calculation */
285 adll0 = (u32)((total_round_trip_delay_arr[bus_index] -
286 (sdr_period * rd_sample_dly[cs]) -
287 (ddr_period * phase0)) / adll_period);
288 adll0 = (adll0 > 31) ? 31 : adll0;
289 adll1 = (u32)((total_round_trip_delay_arr[bus_index + 1] -
290 (sdr_period * rd_sample_dly[cs]) -
291 (ddr_period * phase1)) / adll_period);
292 adll1 = (adll1 > 31) ? 31 : adll1;
293
294 /* The Read delay close the Read FIFO */
295 rd_ready_del[cs] = rd_sample_dly[cs] +
296 read_ready_delay_phase_offset[max_phase];
297 DEBUG_TRAINING_STATIC_IP(
298 DEBUG_LEVEL_TRACE,
299 ("\tadll0 0x%x adll1 0x%x rd_ready_del[cs] 0x%x\n",
300 adll0, adll1, rd_ready_del[cs]));
301
302 /*
303 * Write to the phy of Interface (bit 0 \96 4 \96 ADLL,
304 * bit 6-8 phase)
305 */
306 data0 = ((phase0 << 6) + (adll0 & 0x1f));
307 data1 = ((phase1 << 6) + (adll1 & 0x1f));
308
309 CHECK_STATUS(ddr3_tip_bus_read_modify_write
310 (dev_num, ACCESS_TYPE_UNICAST, if_id,
311 (bus_index % 4), DDR_PHY_DATA, PHY_READ_DELAY(cs),
312 data0, 0x1df));
313 CHECK_STATUS(ddr3_tip_bus_read_modify_write
314 (dev_num, ACCESS_TYPE_UNICAST, if_id,
315 ((bus_index + 1) % 4), DDR_PHY_DATA,
316 PHY_READ_DELAY(cs), data1, 0x1df));
317 }
318
319 for (bus_index = 0; bus_index < bus_per_interface; bus_index++) {
320 VALIDATE_ACTIVE(tm->bus_act_mask, bus_index);
321 CHECK_STATUS(ddr3_tip_bus_read_modify_write
322 (dev_num, ACCESS_TYPE_UNICAST, if_id,
323 bus_index, DDR_PHY_DATA, 0x3, data3, 0x1f));
324 }
325 CHECK_STATUS(ddr3_tip_if_write
326 (dev_num, ACCESS_TYPE_UNICAST, if_id,
327 READ_DATA_SAMPLE_DELAY,
328 (rd_sample_dly[0] + cl_value) + (rd_sample_dly[1] << 8),
329 MASK_ALL_BITS));
330
331 /* Read_ready_del0 bit 0-4 , CS bits 8-12 */
332 CHECK_STATUS(ddr3_tip_if_write
333 (dev_num, ACCESS_TYPE_UNICAST, if_id,
334 READ_DATA_READY_DELAY,
335 rd_ready_del[0] + (rd_ready_del[1] << 8) + cl_value,
336 MASK_ALL_BITS));
337
338 return MV_OK;
339}
340
341/*
342 * DDR3 Static flow
343 */
344int ddr3_tip_run_static_alg(u32 dev_num, enum hws_ddr_freq freq)
345{
346 u32 if_id = 0;
347 struct trip_delay_element *table_ptr;
348 u32 wl_total_round_trip_delay_arr[MAX_TOTAL_BUS_NUM];
349 u32 rl_total_round_trip_delay_arr[MAX_TOTAL_BUS_NUM];
350 struct init_cntr_param init_cntr_prm;
351 int ret;
352 struct hws_topology_map *tm = ddr3_get_topology_map();
353
354 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
355 ("ddr3_tip_run_static_alg"));
356
357 init_cntr_prm.do_mrs_phy = 1;
358 init_cntr_prm.is_ctrl64_bit = 0;
359 init_cntr_prm.init_phy = 1;
360 ret = hws_ddr3_tip_init_controller(dev_num, &init_cntr_prm);
361 if (ret != MV_OK) {
362 DEBUG_TRAINING_STATIC_IP(
363 DEBUG_LEVEL_ERROR,
364 ("hws_ddr3_tip_init_controller failure\n"));
365 }
366
367 /* calculate the round trip delay for Write Leveling */
368 table_ptr = static_config[dev_num].board_trace_arr;
369 CHECK_STATUS(ddr3_tip_static_round_trip_arr_build
370 (dev_num, table_ptr, 1,
371 wl_total_round_trip_delay_arr));
372 /* calculate the round trip delay for Read Leveling */
373 CHECK_STATUS(ddr3_tip_static_round_trip_arr_build
374 (dev_num, table_ptr, 0,
375 rl_total_round_trip_delay_arr));
376
377 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
378 /* check if the interface is enabled */
379 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
380 /*
381 * Static frequency is defined according to init-frequency
382 * (not target)
383 */
384 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
385 ("Static IF %d freq %d\n",
386 if_id, freq));
387 CHECK_STATUS(ddr3_tip_write_leveling_static_config
388 (dev_num, if_id, freq,
389 wl_total_round_trip_delay_arr));
390 CHECK_STATUS(ddr3_tip_read_leveling_static_config
391 (dev_num, if_id, freq,
392 rl_total_round_trip_delay_arr));
393 }
394
395 return MV_OK;
396}
397
398/*
399 * Init controller for static flow
400 */
401int ddr3_tip_static_init_controller(u32 dev_num)
402{
403 u32 index_cnt = 0;
404
405 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
406 ("ddr3_tip_static_init_controller\n"));
407 while (static_init_controller_config[dev_num][index_cnt].reg_addr !=
408 0) {
409 CHECK_STATUS(ddr3_tip_if_write
410 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
411 static_init_controller_config[dev_num][index_cnt].
412 reg_addr,
413 static_init_controller_config[dev_num][index_cnt].
414 reg_data,
415 static_init_controller_config[dev_num][index_cnt].
416 reg_mask));
417
418 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
419 ("Init_controller index_cnt %d\n",
420 index_cnt));
421 index_cnt++;
422 }
423
424 return MV_OK;
425}
426
427int ddr3_tip_static_phy_init_controller(u32 dev_num)
428{
429 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
430 ("Phy Init Controller 2\n"));
431 CHECK_STATUS(ddr3_tip_bus_write
432 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
433 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0xa4,
434 0x3dfe));
435
436 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
437 ("Phy Init Controller 3\n"));
438 CHECK_STATUS(ddr3_tip_bus_write
439 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
440 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0xa6,
441 0xcb2));
442
443 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
444 ("Phy Init Controller 4\n"));
445 CHECK_STATUS(ddr3_tip_bus_write
446 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
447 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0xa9,
448 0));
449
450 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
451 ("Static Receiver Calibration\n"));
452 CHECK_STATUS(ddr3_tip_bus_write
453 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
454 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0xd0,
455 0x1f));
456
457 DEBUG_TRAINING_STATIC_IP(DEBUG_LEVEL_TRACE,
458 ("Static V-REF Calibration\n"));
459 CHECK_STATUS(ddr3_tip_bus_write
460 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
461 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0xa8,
462 0x434));
463
464 return MV_OK;
465}
466#endif
467
468/*
469 * Configure phy (called by static init controller) for static flow
470 */
471int ddr3_tip_configure_phy(u32 dev_num)
472{
473 u32 if_id, phy_id;
474 struct hws_topology_map *tm = ddr3_get_topology_map();
475
476 CHECK_STATUS(ddr3_tip_bus_write
477 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
478 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
479 PAD_ZRI_CALIB_PHY_REG,
480 ((0x7f & g_zpri_data) << 7 | (0x7f & g_znri_data))));
481 CHECK_STATUS(ddr3_tip_bus_write
482 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
483 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_CONTROL,
484 PAD_ZRI_CALIB_PHY_REG,
485 ((0x7f & g_zpri_ctrl) << 7 | (0x7f & g_znri_ctrl))));
486 CHECK_STATUS(ddr3_tip_bus_write
487 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
488 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
489 PAD_ODT_CALIB_PHY_REG,
490 ((0x3f & g_zpodt_data) << 6 | (0x3f & g_znodt_data))));
491 CHECK_STATUS(ddr3_tip_bus_write
492 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
493 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_CONTROL,
494 PAD_ODT_CALIB_PHY_REG,
495 ((0x3f & g_zpodt_ctrl) << 6 | (0x3f & g_znodt_ctrl))));
496
497 CHECK_STATUS(ddr3_tip_bus_write
498 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
499 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
500 PAD_PRE_DISABLE_PHY_REG, 0));
501 CHECK_STATUS(ddr3_tip_bus_write
502 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
503 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA,
504 CMOS_CONFIG_PHY_REG, 0));
505 CHECK_STATUS(ddr3_tip_bus_write
506 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
507 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_CONTROL,
508 CMOS_CONFIG_PHY_REG, 0));
509
510 for (if_id = 0; if_id <= MAX_INTERFACE_NUM - 1; if_id++) {
511 /* check if the interface is enabled */
512 VALIDATE_ACTIVE(tm->if_act_mask, if_id);
513
514 for (phy_id = 0;
515 phy_id < tm->num_of_bus_per_interface;
516 phy_id++) {
517 VALIDATE_ACTIVE(tm->bus_act_mask, phy_id);
518 /* Vref & clamp */
519 CHECK_STATUS(ddr3_tip_bus_read_modify_write
520 (dev_num, ACCESS_TYPE_UNICAST,
521 if_id, phy_id, DDR_PHY_DATA,
522 PAD_CONFIG_PHY_REG,
523 ((clamp_tbl[if_id] << 4) | vref),
524 ((0x7 << 4) | 0x7)));
525 /* clamp not relevant for control */
526 CHECK_STATUS(ddr3_tip_bus_read_modify_write
527 (dev_num, ACCESS_TYPE_UNICAST,
528 if_id, phy_id, DDR_PHY_CONTROL,
529 PAD_CONFIG_PHY_REG, 0x4, 0x7));
530 }
531 }
532
533 CHECK_STATUS(ddr3_tip_bus_write
534 (dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
535 ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, DDR_PHY_DATA, 0x90,
536 0x6002));
537
538 return MV_OK;
539}