// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 */

#if defined(CONFIG_DDR4)

/* DDR4 training service API and data structures */

#include "ddr3_init.h"
#include "mv_ddr4_training.h"
#include "mv_ddr4_mpr_pda_if.h"
#include "mv_ddr4_training_leveling.h"
#include "mv_ddr4_training_calibration.h"
#include "mv_ddr_regs.h"

/* set to 1 to enable the workaround that makes the sstl and pod calibrations produce the same vref value */
u8 vref_calibration_wa = 1;

static int a39x_z1_config(u32 dev_num);

/* vref values corresponding to each vcommon tap */
static u16 vref_val[] = {
	746,
	654,
	671,
	686,
	701,
	713,
	725,
	736
};

static u32 mv_ddr4_config_phy_vref_tap;

/* configure DDR4 SDRAM */
int mv_ddr4_sdram_config(u32 dev_num)
{
	/* TODO: zq params to be frequency dependent */
	u32 zq_init = 1023;
	u32 zq_oper = 511;
	u32 zq_cs = 127;
	u32 if_id;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	int status;

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);

		/* dtype: 0x3 for DDR4, 0x1 for DDR3 */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, SDRAM_CFG_REG,
					   (0x1 << 14) | (0x1 << 20), (0x1 << 14) | (0x1 << 20));
		if (status != MV_OK)
			return status;

		/* cpm */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, DRAM_PINS_MUX_REG,
					   0x2, 0x3);
		if (status != MV_OK)
			return status;

		/*
		 * set t_dllk to 1024, the largest of the per-speed-bin
		 * minimum values (required by the high-speed bins)
		 * TODO: may change for future speed bins
		 */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, DRAM_DLL_TIMING_REG,
					   0x400, 0xfff);
		if (status != MV_OK)
			return status;

		/* set zq_init */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, DRAM_ZQ_INIT_TIMIMG_REG,
					   zq_init, 0xfff);
		if (status != MV_OK)
			return status;

		/* set zq_oper */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, DRAM_ZQ_TIMING_REG,
					   zq_oper, 0x7ff);
		if (status != MV_OK)
			return status;

		/* set zq_cs */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, DRAM_ZQ_TIMING_REG,
					   zq_cs << 16, 0x3ff0000);
		if (status != MV_OK)
			return status;

		/*
		 * configure unbuffered dimm mode
		 * TODO: support registered dimm
		 */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, SDRAM_CFG_REG,
					   0x0, 0x1 << 17);
		if (status != MV_OK)
			return status;
	}

	a39x_z1_config(dev_num);

	return MV_OK;
}

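/*
 * convert the rtt_nom mode register field (the values below correspond to
 * the ddr4 mr1 a[10:8] encoding) to an effective resistance in ohms, i.e.,
 * rzq = 240 ohm divided by the encoded divider; 0 means rtt_nom disabled
 * (returned as 0xff)
 */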
u16 mv_ddr4_rtt_nom_to_odt(u16 rtt_nom)
{
	u8 odt;

	if (rtt_nom == 0)
		odt = 0xff;
	else if (rtt_nom == (1 << 8))
		odt = 60; /* 240 / 4 */
	else if (rtt_nom == (2 << 8))
		odt = 120; /* 240 / 2 */
	else if (rtt_nom == (3 << 8))
		odt = 40; /* 240 / 6 */
	else if (rtt_nom == (4 << 8))
		odt = 240; /* 240 / 1 */
	else if (rtt_nom == (5 << 8))
		odt = 48; /* 240 / 5 */
	else if (rtt_nom == (6 << 8))
		odt = 80; /* 240 / 3 */
	else if (rtt_nom == (7 << 8))
		odt = 34; /* 240 / 7 */
	else
		odt = 1;

	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("mv_ddr4_rtt_nom_to_odt: rtt_nom = %d, odt = %d\n", rtt_nom, odt));

	return odt;
}

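/*
 * convert the rtt_wr mode register field (the values below correspond to
 * the ddr4 mr2 a[10:9] encoding) to an effective resistance in ohms:
 * rzq / 2 = 120 ohm or rzq / 1 = 240 ohm; 0 means dynamic odt off
 */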
u16 mv_ddr4_rtt_wr_to_odt(u16 rtt_wr)
{
	u8 odt;

	if (rtt_wr == 0)
		odt = 0xff;
	else if (rtt_wr == (1 << 9))
		odt = 120; /* 240 / 2 */
	else if (rtt_wr == (2 << 9))
		odt = 240; /* 240 / 1 */
	else
		odt = 1;

	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("mv_ddr4_rtt_wr_to_odt rtt_wr = %d, odt = %d\n", rtt_wr, odt));

	return odt;
}

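/*
 * estimate the effective receive-side odt in ohms: the pad odt is
 * linearized from the g_zpodt_data code via the odt_slope/odt_intercept
 * tables and, if odt is enabled, combined in parallel with the dram's
 * rtt_nom (r_total = r1 * r2 / (r1 + r2)); e.g., a 60 ohm pad odt in
 * parallel with rtt_nom = rzq / 4 = 60 ohm yields 30 ohm
 */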
static u32 mv_ddr4_rx_odt_get(void)
{
	u16 odt = odt_intercept[(int)g_zpodt_data / 8] - (g_zpodt_data * odt_slope[(int)g_zpodt_data / 8]) / 100;
	u16 rtt;

	if (g_odt_config & 0xf) {
		rtt = mv_ddr4_rtt_nom_to_odt(g_rtt_nom);
		odt = (odt * rtt) / (odt + rtt);
	}

	return odt;
}

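/*
 * map a vcommon value to the nearest vref tap: the range boundaries below
 * lie approximately midway between adjacent vref_val[] entries (e.g., 662
 * between 654 and 671), so each tap is chosen for the vcommon values
 * closest to its table voltage
 */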
static u8 mv_ddr4_vcommon_to_vref(u16 vcommon)
{
	u8 vref_tap;

	if ((vcommon > 600) && (vcommon <= 662)) {
		vref_tap = 1;
	} else if ((vcommon > 662) && (vcommon <= 679)) {
		vref_tap = 2;
	} else if ((vcommon > 679) && (vcommon <= 693)) {
		vref_tap = 3;
	} else if ((vcommon > 693) && (vcommon <= 707)) {
		vref_tap = 4;
	} else if ((vcommon > 707) && (vcommon <= 719)) {
		vref_tap = 5;
	} else if ((vcommon > 719) && (vcommon <= 725)) {
		vref_tap = 6;
	} else if ((vcommon > 725) && (vcommon <= 731)) {
		vref_tap = 7;
	} else if ((vcommon > 731) && (vcommon <= 800)) {
		vref_tap = 0;
	} else if (vcommon > 800) {
		vref_tap = 0;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("mv_ddr4_vcommon_to_vref: warning: vcommon value too high: %d\n", vcommon));
	} else if (vcommon < 600) {
		vref_tap = 1;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("mv_ddr4_vcommon_to_vref: warning: vcommon value too low: %d\n", vcommon));
	} else {
		vref_tap = 1;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("mv_ddr4_vcommon_to_vref: warning: vcommon out of range: %d\n", vcommon));
	}

	return vref_tap;
}

/* configure phy */
int mv_ddr4_phy_config(u32 dev_num)
{
	u8 cs, i, pod_val;
	u32 upper_pcal, left_pcal, upper_ncal;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	/* design GL params to be set outside */
	u32 ron = 34; /* dic - rzq / 6 or rzq / 7 */
	u32 rodt = mv_ddr4_rx_odt_get(); /* effective odt per DGL */
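	/*
	 * common-mode voltage seen by a pod receiver, apparently in units of
	 * vdd / 1000: with the driver pulling low through ron against rodt
	 * terminated to vdd, v_low = vdd * ron / (ron + rodt) and
	 * v_high = vdd, so the midpoint is
	 * (v_high + v_low) / 2 = vdd * (ron + rodt / 2) / (ron + rodt)
	 */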
	u32 vcommon = (1000 * (ron + rodt / 2)) / (ron + rodt);
	u32 vref_idx;
	u8 rc_tap;
	u8 subphy_max = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	int status;

	mv_ddr4_config_phy_vref_tap = mv_ddr4_vcommon_to_vref(vcommon);

	/* change calculation for 1GHz frequency */
	if (tm->interface_params[0].memory_freq == MV_DDR_FREQ_1000)
		mv_ddr4_config_phy_vref_tap += 2;

	vref_idx = (mv_ddr4_config_phy_vref_tap < 8) ? mv_ddr4_config_phy_vref_tap : 0;
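	/* linear fit converting the vref-to-vcommon gap into a receiver calibration tap */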
	rc_tap = (430 * (vref_val[vref_idx] - vcommon)) / 1000 + 33;
	/* 0x1 for pod mode */
	pod_val = (vref_calibration_wa == 1) ? 0x0 : 0x1;
	upper_pcal = pod_val;
	left_pcal = pod_val;
	upper_ncal = 0;

	status = ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
				    PARAM_NOT_CARE, DDR_PHY_DATA, TEST_ADLL_PHY_REG, pod_val);
	if (status != MV_OK)
		return status;

	status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, GP_RSVD0_REG,
				   (upper_pcal << 12) | (left_pcal << 6) | (upper_ncal << 5), 0x1060);
	if (status != MV_OK)
		return status;

	/*
	 * phy register 0xbf, bit 0 - configure to pod mode (0x1)
	 * phy register 0xa8, bits [6:4] - configure to clamp (0x0)
	 * subphys (broadcast) register 0xa8, bits [2:0] - configure to int ref m (0x4),
	 * TODO: need to write it to control subphys too
	 * vref tap - configure to SSTL calibration only (4)
	 * enhanced vref value - set to no clamp (0)
	 */
	for (i = 0; i < subphy_max; i++) {
		VALIDATE_BUS_ACTIVE(tm->bus_act_mask, i);
		ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST, 0, i, DDR_PHY_DATA, PAD_CFG_PHY_REG,
					       (0 << 4) | 4, ((0x7 << 4) | 0x7));
	}

	for (i = 0; i < 3; i++)
		ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST, 0, i, DDR_PHY_CONTROL, PAD_CFG_PHY_REG,
					       (0 << 4) | 4, ((0x7 << 4) | 0x7));

	/* phy register 0xa4, bits [13:7] - configure to 0x7c zpri /znri */
	status = ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
				    PARAM_NOT_CARE, DDR_PHY_DATA, PAD_ZRI_CAL_PHY_REG,
				    ((0x7f & g_zpri_data) << 7) | (0x7f & g_znri_data));
	if (status != MV_OK)
		return status;

	/*
	 * phy register 0xa6, bits [5:0] - configure to znodt (0x0)
	 * phy register 0xa6, bits [11:6] - configure to zpodt (60Ohm, 0x1d)
	 */
	status = ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
				    PARAM_NOT_CARE, DDR_PHY_DATA, PAD_ODT_CAL_PHY_REG, g_zpodt_data << 6);
	if (status != MV_OK)
		return status;

	/* update for all active cs */
	for (cs = 0; cs < MAX_CS_NUM; cs++) {
		/*
		 * writes to present cs only
		 * phy register 0xdb, bits [5:0] - configure to rcvr cal for 50% duty cycle,
		 * broadcast to all bits cs0 (0x26)
		 */
		status = ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
					    PARAM_NOT_CARE, DDR_PHY_DATA, VREF_BCAST_PHY_REG(cs), rc_tap);
		if (status != MV_OK)
			return status;
	}

	return MV_OK;
}

/*
 * configure sstl pads for manual calibration and pod pads for the
 * automatic one; assumes the subphys were configured to pod mode earlier
 */
int mv_ddr4_calibration_adjust(u32 dev_num, u8 vref_en, u8 pod_only)
{
	u8 i, if_id = 0;
	u32 read_data[MAX_INTERFACE_NUM];
	u32 ncal = 0, pcal = 0;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	int status = MV_OK;
	u8 subphy_max = ddr3_tip_dev_attr_get(dev_num, MV_ATTR_OCTET_PER_INTERFACE);
	u8 vref_tap = mv_ddr4_config_phy_vref_tap;
	u32 vref_idx = (vref_tap < 8) ? vref_tap : 0;

	if (vref_calibration_wa == 0)
		return mv_ddr4_calibration_validate(dev_num);

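	/*
	 * vref calibration workaround flow: run the automatic sstl
	 * calibration and wait for its results to reach the io pads, copy
	 * the read-back pcal/ncal codes into the manual override fields
	 * (unless pod_only is set), switch the pads to pod mode, re-run the
	 * automatic calibration, and validate the resulting pod-v/pod-h
	 * codes below
	 */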
	if (vref_en == 1) {
		/* enhanced vref value set to no clamp (0) */
		for (i = 0; i < subphy_max; i++) {
			VALIDATE_BUS_ACTIVE(tm->bus_act_mask, i);
			ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST, 0, i, DDR_PHY_DATA,
						       PAD_CFG_PHY_REG, (0 << 4) | vref_idx, ((0x7 << 4) | 0x7));
		}

		for (i = 0; i < 3; i++)
			ddr3_tip_bus_read_modify_write(dev_num, ACCESS_TYPE_UNICAST, 0, i, DDR_PHY_CONTROL,
						       PAD_CFG_PHY_REG, (0 << 4) | vref_idx, ((0x7 << 4) | 0x7));
	}

	/* pad calibration control - enable */
	status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG,
				   (calibration_update_control << 3) | 0x1, (0x3 << 3) | 0x1);
	if (status != MV_OK)
		return status;

	/* calibration update external */
	status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id,
				   MAIN_PADS_CAL_MACH_CTRL_REG, 0x2 << 3, 0x3 << 3);
	if (status != MV_OK)
		return status;

	/* poll init calibration done */
	if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x80000000, 0x80000000,
				MAIN_PADS_CAL_MACH_CTRL_REG, MAX_POLLING_ITERATIONS) != MV_OK)
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("mv_ddr4_calibration_adjust: calibration polling failed (0)\n"));

	/* poll until calibration results have propagated to the io pads */
	if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x3ffffff, 0x3ffffff, 0x1674,
				MAX_POLLING_ITERATIONS) != MV_OK)
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("mv_ddr4_calibration_adjust: calibration polling failed (1)\n"));

	mdelay(10); /* TODO: check it */

	/* disable dynamic */
	status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, MAIN_PADS_CAL_MACH_CTRL_REG, 0, 0x1);
	if (status != MV_OK)
		return status;

	/* poll initial calibration done */
	if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x80000000, 0x80000000,
				MAIN_PADS_CAL_MACH_CTRL_REG, MAX_POLLING_ITERATIONS) != MV_OK)
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("mv_ddr4_calibration_adjust: calibration polling failed (2)\n"));

	/* poll until calibration results have propagated to the io pads */
	if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x3ffffff, 0x3ffffff, 0x1674,
				MAX_POLLING_ITERATIONS) != MV_OK)
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("mv_ddr4_calibration_adjust: calibration polling failed (3)\n"));

	mdelay(10); /* TODO: check why polling insufficient */

	/* read calibration value and set it manually */
	status = ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8, read_data, MASK_ALL_BITS);
	if (status != MV_OK)
		return status;

	ncal = (read_data[if_id] & (0x3f << 10)) >> 10;
	pcal = (read_data[if_id] & (0x3f << 4)) >> 4;
	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
			  ("mv_ddr4_calibration_adjust: sstl pcal = 0x%x, ncal = 0x%x\n",
			   pcal, ncal));
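	/*
	 * the 6-bit pcal/ncal codes are rejected when close to either end of
	 * their range (saturation suggests the calibration did not converge);
	 * the same check is repeated for the pod results below
	 */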
	if ((ncal >= 56) || (ncal <= 6) || (pcal >= 59) || (pcal <= 7)) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("mv_ddr4_calibration_adjust: error: sstl pcal = 0x%x, ncal = 0x%x out of range\n",
				   pcal, ncal));
		status = MV_FAIL;
	}

	if (pod_only == 0) {
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8, 0x1 << 3, 0x1 << 3);
		if (status != MV_OK)
			return status;

		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1dc8,
					   (ncal << 22) | (pcal << 16), (0x3f << 22) | (0x3f << 16));
		if (status != MV_OK)
			return status;

		/* configure to pod mode (0x1) */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE,
					   GP_RSVD0_REG,
					   (0x1 << 12) | (0x1 << 6) | (0x1 << 5), 0x1060);
		if (status != MV_OK)
			return status;

		status = ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
					    PARAM_NOT_CARE, DDR_PHY_DATA, TEST_ADLL_PHY_REG, 0x1);
		if (status != MV_OK)
			return status;

		status = ddr3_tip_bus_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, ACCESS_TYPE_MULTICAST,
					    PARAM_NOT_CARE, DDR_PHY_CONTROL, TEST_ADLL_PHY_REG, 0x1);
		if (status != MV_OK)
			return status;

		/* pad calibration control - enable */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG,
					   0x1, 0x1);
		if (status != MV_OK)
			return status;

		/* poll initial calibration done */
		if (ddr3_tip_if_polling(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x80000000, 0x80000000,
					MAIN_PADS_CAL_MACH_CTRL_REG, MAX_POLLING_ITERATIONS) != MV_OK)
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
					  ("mv_ddr4_calibration_adjust: calibration polling failed (4)\n"));
	}

	/* calibration update internal */
	status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, MAIN_PADS_CAL_MACH_CTRL_REG,
				   calibration_update_control << 3, 0x3 << 3);
	if (status != MV_OK)
		return status;

	/* validate pod vertical (pod-v) calibration result */
	status = ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x14c8, read_data, MASK_ALL_BITS);
	if (status != MV_OK)
		return status;
	ncal = (read_data[if_id] & (0x3f << 10)) >> 10;
	pcal = (read_data[if_id] & (0x3f << 4)) >> 4;
	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
			  ("mv_ddr4_calibration_adjust: pod-v pcal = 0x%x, ncal = 0x%x\n",
			   pcal, ncal));
	if ((ncal >= 56) || (ncal <= 6) || (pcal >= 59) || (pcal <= 7)) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("mv_ddr4_calibration_adjust: error: pod-v pcal = 0x%x, ncal = 0x%x out of range\n",
				   pcal, ncal));
		status = MV_FAIL;
	}

	/* validate pod horizontal (pod-h) calibration result */
	status = ddr3_tip_if_read(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x17c8, read_data, MASK_ALL_BITS);
	if (status != MV_OK)
		return status;
	ncal = (read_data[if_id] & (0x3f << 10)) >> 10;
	pcal = (read_data[if_id] & (0x3f << 4)) >> 4;
	DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO,
			  ("mv_ddr4_calibration_adjust: pod-h pcal = 0x%x, ncal = 0x%x\n",
			   pcal, ncal));
	if ((ncal >= 56) || (ncal <= 6) || (pcal >= 59) || (pcal <= 7)) {
		DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR,
				  ("mv_ddr4_calibration_adjust: error: pod-h pcal = 0x%x, ncal = 0x%x out of range\n",
				   pcal, ncal));
		status = MV_FAIL;
	}

	/* pad calibration control - disable */
	status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_MULTICAST, PARAM_NOT_CARE, MAIN_PADS_CAL_MACH_CTRL_REG,
				   (calibration_update_control << 3) | 0x0, (0x3 << 3) | 0x1);
	if (status != MV_OK)
		return status;

	return status;
}

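/*
 * settings apparently specific to the armada 39x z1 silicon stepping (per
 * the function name): clear the xbar split bypass bit (dlb is off) and
 * disable the auto power save option
 */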
static int a39x_z1_config(u32 dev_num)
{
	u32 if_id;
	struct mv_ddr_topology_map *tm = mv_ddr_topology_map_get();
	int status;

	for (if_id = 0; if_id < MAX_INTERFACE_NUM; if_id++) {
		VALIDATE_IF_ACTIVE(tm->if_act_mask, if_id);
		/*
		 * xbar split bypass - dlb is off,
		 * when enabled, set to 0x1
		 */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1424, 0x0 << 3, 0x1 << 3);
		if (status != MV_OK)
			return status;

		/* auto power save option */
		status = ddr3_tip_if_write(dev_num, ACCESS_TYPE_UNICAST, if_id, 0x1474, 0x0, 0xffffffff);
		if (status != MV_OK)
			return status;
	}

	return MV_OK;
}

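/*
 * ddr4-specific training stages, each gated by its mask_tune_func bit and
 * run in order: receiver calibration, write-leveling phase correction,
 * dq vref calibration, dm tuning, and dq pins mapping; when debug_mode is
 * set, a failing stage is logged and the flow continues instead of
 * returning early
 */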
int mv_ddr4_training_main_flow(u32 dev_num)
{
	int status = MV_OK;
	u16 pbs_tap_factor[MAX_INTERFACE_NUM][MAX_BUS_NUM][BUS_WIDTH_IN_BITS] = {0};

	if (mask_tune_func & RECEIVER_CALIBRATION_MASK_BIT) {
		training_stage = RECEIVER_CALIBRATION;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("RECEIVER_CALIBRATION_MASK_BIT #%d\n", effective_cs));
		status = mv_ddr4_receiver_calibration(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (status != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("mv_ddr4_receiver_calibration failure\n"));
			if (debug_mode == 0)
				return status;
		}
	}

	if (mask_tune_func & WL_PHASE_CORRECTION_MASK_BIT) {
		training_stage = WL_PHASE_CORRECTION;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("WL_PHASE_CORRECTION_MASK_BIT #%d\n", effective_cs));
		status = mv_ddr4_dynamic_wl_supp(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (status != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("mv_ddr4_dynamic_wl_supp failure\n"));
			if (debug_mode == 0)
				return status;
		}
	}

	if (mask_tune_func & DQ_VREF_CALIBRATION_MASK_BIT) {
		training_stage = DQ_VREF_CALIBRATION;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DQ_VREF_CALIBRATION_MASK_BIT #%d\n", effective_cs));
		status = mv_ddr4_dq_vref_calibration(dev_num, pbs_tap_factor);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (status != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("mv_ddr4_dq_vref_calibration failure\n"));
			if (debug_mode == 0)
				return status;
		}
	}

	if (mask_tune_func & DM_TUNING_MASK_BIT) {
		training_stage = DM_TUNING;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DM_TUNING_MASK_BIT #%d\n", effective_cs));
		status = mv_ddr4_dm_tuning(effective_cs, pbs_tap_factor);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (status != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("mv_ddr4_dm_tuning failure\n"));
			if (debug_mode == 0)
				return status;
		}
	}

	if (mask_tune_func & DQ_MAPPING_MASK_BIT) {
		training_stage = DQ_MAPPING;
		DEBUG_TRAINING_IP(DEBUG_LEVEL_INFO, ("DQ_MAPPING_MASK_BIT\n"));
		status = mv_ddr4_dq_pins_mapping(dev_num);
		if (is_reg_dump != 0)
			ddr3_tip_reg_dump(dev_num);
		if (status != MV_OK) {
			DEBUG_TRAINING_IP(DEBUG_LEVEL_ERROR, ("mv_ddr4_dq_pins_mapping failure\n"));
			if (debug_mode == 0)
				return status;
		}
	}

	return status;
}
#endif /* CONFIG_DDR4 */