blob: c169a8ea16b4b2024d8aa259f85ba2fcd0303765 [file] [log] [blame]
Tom Rini10e47792018-05-06 17:58:06 -04001// SPDX-License-Identifier: GPL-2.0
Stefan Roeseae6223d2015-01-19 11:33:40 +01002/*
3 * Copyright (C) Marvell International Ltd. and its affiliates
Stefan Roeseae6223d2015-01-19 11:33:40 +01004 */
5
Stefan Roeseae6223d2015-01-19 11:33:40 +01006#include <i2c.h>
7#include <spl.h>
8#include <asm/io.h>
9#include <asm/arch/cpu.h>
10#include <asm/arch/soc.h>
11
12#include "ddr3_init.h"
13
14#if defined(MV88F78X60)
15#include "ddr3_axp_config.h"
16#elif defined(MV88F67XX)
17#include "ddr3_a370_config.h"
18#endif
19
20#if defined(MV88F672X)
21#include "ddr3_a375_config.h"
22#endif
23
24#ifdef DUNIT_SPD
25
26/* DIMM SPD offsets */
27#define SPD_DEV_TYPE_BYTE 2
28
29#define SPD_MODULE_TYPE_BYTE 3
30#define SPD_MODULE_MASK 0xf
31#define SPD_MODULE_TYPE_RDIMM 1
32#define SPD_MODULE_TYPE_UDIMM 2
33
34#define SPD_DEV_DENSITY_BYTE 4
35#define SPD_DEV_DENSITY_MASK 0xf
36
37#define SPD_ROW_NUM_BYTE 5
38#define SPD_ROW_NUM_MIN 12
39#define SPD_ROW_NUM_OFF 3
40#define SPD_ROW_NUM_MASK (7 << SPD_ROW_NUM_OFF)
41
42#define SPD_COL_NUM_BYTE 5
43#define SPD_COL_NUM_MIN 9
44#define SPD_COL_NUM_OFF 0
45#define SPD_COL_NUM_MASK (7 << SPD_COL_NUM_OFF)
46
47#define SPD_MODULE_ORG_BYTE 7
Wolfgang Denk62fb2b42021-09-27 17:42:39 +020048#define SPD_MODULE_SDRAM_DEV_WIDTH_OFF 0
Stefan Roeseae6223d2015-01-19 11:33:40 +010049#define SPD_MODULE_SDRAM_DEV_WIDTH_MASK (7 << SPD_MODULE_SDRAM_DEV_WIDTH_OFF)
50#define SPD_MODULE_BANK_NUM_MIN 1
51#define SPD_MODULE_BANK_NUM_OFF 3
52#define SPD_MODULE_BANK_NUM_MASK (7 << SPD_MODULE_BANK_NUM_OFF)
53
54#define SPD_BUS_WIDTH_BYTE 8
55#define SPD_BUS_WIDTH_OFF 0
56#define SPD_BUS_WIDTH_MASK (7 << SPD_BUS_WIDTH_OFF)
57#define SPD_BUS_ECC_OFF 3
58#define SPD_BUS_ECC_MASK (3 << SPD_BUS_ECC_OFF)
59
60#define SPD_MTB_DIVIDEND_BYTE 10
61#define SPD_MTB_DIVISOR_BYTE 11
62#define SPD_TCK_BYTE 12
63#define SPD_SUP_CAS_LAT_LSB_BYTE 14
64#define SPD_SUP_CAS_LAT_MSB_BYTE 15
65#define SPD_TAA_BYTE 16
66#define SPD_TWR_BYTE 17
67#define SPD_TRCD_BYTE 18
68#define SPD_TRRD_BYTE 19
69#define SPD_TRP_BYTE 20
70
71#define SPD_TRAS_MSB_BYTE 21
72#define SPD_TRAS_MSB_MASK 0xf
73
74#define SPD_TRC_MSB_BYTE 21
75#define SPD_TRC_MSB_MASK 0xf0
76
77#define SPD_TRAS_LSB_BYTE 22
78#define SPD_TRC_LSB_BYTE 23
79#define SPD_TRFC_LSB_BYTE 24
80#define SPD_TRFC_MSB_BYTE 25
81#define SPD_TWTR_BYTE 26
82#define SPD_TRTP_BYTE 27
83
84#define SPD_TFAW_MSB_BYTE 28
85#define SPD_TFAW_MSB_MASK 0xf
86
87#define SPD_TFAW_LSB_BYTE 29
88#define SPD_OPT_FEATURES_BYTE 30
89#define SPD_THERMAL_REFRESH_OPT_BYTE 31
90
91#define SPD_ADDR_MAP_BYTE 63
92#define SPD_ADDR_MAP_MIRROR_OFFS 0
93
94#define SPD_RDIMM_RC_BYTE 69
95#define SPD_RDIMM_RC_NIBBLE_MASK 0xF
96#define SPD_RDIMM_RC_NUM 16
97
98/* Dimm Memory Type values */
99#define SPD_MEM_TYPE_SDRAM 0x4
100#define SPD_MEM_TYPE_DDR1 0x7
101#define SPD_MEM_TYPE_DDR2 0x8
102#define SPD_MEM_TYPE_DDR3 0xB
103
104#define DIMM_MODULE_MANU_OFFS 64
105#define DIMM_MODULE_MANU_SIZE 8
106#define DIMM_MODULE_VEN_OFFS 73
107#define DIMM_MODULE_VEN_SIZE 25
108#define DIMM_MODULE_ID_OFFS 99
109#define DIMM_MODULE_ID_SIZE 18
110
/* Enumeration for DIMM interface voltage levels. */
enum dimm_volt_if {
	TTL_5V_TOLERANT,
	LVTTL,
	HSTL_1_5V,
	SSTL_3_3V,
	SSTL_2_5V,
	VOLTAGE_UNKNOWN,
};

/* Enumeration for SDRAM CAS latencies. */
enum dimm_sdram_cas {
	SD_CL_1 = 1,
	SD_CL_2,
	SD_CL_3,
	SD_CL_4,
	SD_CL_5,
	SD_CL_6,
	SD_CL_7,
	SD_FAULT
};

/*
 * Enumeration for memory types. Same ordering as the SPD_MEM_TYPE_*
 * defines above, but NOT the same numeric values (those are the raw
 * SPD byte-2 codes).
 */
enum memory_type {
	MEM_TYPE_SDRAM,
	MEM_TYPE_DDR1,
	MEM_TYPE_DDR2,
	MEM_TYPE_DDR3
};
140
/*
 * DIMM information structure - filled by ddr3_spd_init() from the raw
 * SPD EEPROM contents, then merged across DIMMs by ddr3_spd_sum_init().
 */
typedef struct dimm_info {
	/* DIMM dimensions */
	u32 num_of_module_ranks;	/* ranks (chip selects) on the module */
	u32 data_width;			/* module bus width in bits (8/16/32/64) */
	u32 rank_capacity;		/* single-rank capacity (encoded; see ddr3_spd_init) */
	u32 num_of_devices;		/* SDRAM device count, including ECC devices */

	u32 sdram_width;		/* one SDRAM device's width in bits (4/8/16/32) */
	u32 num_of_banks_on_each_device;
	u32 sdram_capacity;		/* raw SPD density code (capacity = 256Mb << code) */

	u32 num_of_row_addr;		/* row address bits (12..16) */
	u32 num_of_col_addr;		/* column address bits (9..12) */

	u32 addr_mirroring;		/* non-zero if rank-1 address mapping is mirrored */

	u32 err_check_type;		/* ECC , PARITY.. (1 = ECC present, 0 = none) */
	u32 type_info;			/* module type (RDIMM/UDIMM); DDR2 only per legacy note */

	/* DIMM timing parameters - all in picoseconds unless noted */
	u32 supported_cas_latencies;	/* bitmask; bit0 = CL4 (see table in ddr3_spd_init) */
	u32 refresh_interval;		/* tREFI */
	u32 min_cycle_time;		/* tCKmin */
	u32 min_row_precharge_time;	/* tRP */
	u32 min_row_active_to_row_active;	/* tRRD */
	u32 min_ras_to_cas_delay;	/* tRCD */
	u32 min_write_recovery_time;	/* tWR - DDR3/2 only */
	u32 min_write_to_read_cmd_delay;	/* tWTR - DDR3/2 only */
	u32 min_read_to_prech_cmd_delay;	/* tRTP - DDR3/2 only */
	u32 min_active_to_precharge;	/* tRAS */
	u32 min_refresh_recovery;	/* tRFC - DDR3/2 only */
	u32 min_cas_lat_time;		/* tAAmin */
	u32 min_four_active_win_delay;	/* tFAW */
	u8 dimm_rc[SPD_RDIMM_RC_NUM];	/* RDIMM register control words */

	/* DIMM vendor ID */
	u32 vendor;
} MV_DIMM_INFO;
180
181static int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info,
182 u32 dimm);
183static u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val);
184static u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val);
185static int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width);
186static u32 ddr3_div(u32 val, u32 divider, u32 sub);
187
188extern u8 spd_data[SPD_SIZE];
189extern u32 odt_config[ODT_OPT];
190extern u16 odt_static[ODT_OPT][MAX_CS];
191extern u16 odt_dynamic[ODT_OPT][MAX_CS];
192
193#if !(defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710))
194/*
195 * Name: ddr3_get_dimm_num - Find number of dimms and their addresses
196 * Desc:
197 * Args: dimm_addr - array of dimm addresses
198 * Notes:
199 * Returns: None.
200 */
201static u32 ddr3_get_dimm_num(u32 *dimm_addr)
202{
203 u32 dimm_cur_addr;
204 u8 data[3];
205 u32 dimm_num = 0;
206 int ret;
207
208 /* Read the dimm eeprom */
209 for (dimm_cur_addr = MAX_DIMM_ADDR; dimm_cur_addr > MIN_DIMM_ADDR;
210 dimm_cur_addr--) {
Stefan Roese11c66932021-11-18 09:19:38 +0100211 struct udevice *udev;
212
Stefan Roeseae6223d2015-01-19 11:33:40 +0100213 data[SPD_DEV_TYPE_BYTE] = 0;
214
215 /* Far-End DIMM must be connected */
216 if ((dimm_num == 0) && (dimm_cur_addr < FAR_END_DIMM_ADDR))
217 return 0;
218
Stefan Roese11c66932021-11-18 09:19:38 +0100219 ret = i2c_get_chip_for_busnum(0, dimm_cur_addr, 1, &udev);
220 if (ret)
221 continue;
222
223 ret = dm_i2c_read(udev, 0, data, 3);
Stefan Roeseae6223d2015-01-19 11:33:40 +0100224 if (!ret) {
225 if (data[SPD_DEV_TYPE_BYTE] == SPD_MEM_TYPE_DDR3) {
226 dimm_addr[dimm_num] = dimm_cur_addr;
227 dimm_num++;
228 }
229 }
230 }
231
232 return dimm_num;
233}
234#endif
235
/*
 * Name:    ddr3_spd_init - Get the SPD parameters.
 * Desc:    Read the DIMM SPD parameters into given struct parameter.
 * Args:    info       - DIMM information structure to fill.
 *          dimm_addr  - TWSI (I2C) address of the DIMM's SPD EEPROM; when 0,
 *                       no I2C access is done and the pre-filled global
 *                       spd_data[] buffer is parsed as-is.
 *          dimm_width - DIMM data bus width; 32 selects the halved
 *                       rank-capacity encoding (see below).
 * Notes:
 * Returns: MV_OK if function could read DIMM parameters,
 *          MV_DDR3_TRAINING_ERR_* error code otherwise.
 */
int ddr3_spd_init(MV_DIMM_INFO *info, u32 dimm_addr, u32 dimm_width)
{
	u32 tmp;
	u32 time_base;
	int ret;
	__maybe_unused u32 rc;
	__maybe_unused u8 vendor_high, vendor_low;

	if (dimm_addr != 0) {
		struct udevice *udev;

		memset(spd_data, 0, SPD_SIZE * sizeof(u8));

		ret = i2c_get_chip_for_busnum(0, dimm_addr, 1, &udev);
		if (ret)
			return MV_DDR3_TRAINING_ERR_TWSI_FAIL;

		/* Read the full SPD EEPROM into the global buffer */
		ret = dm_i2c_read(udev, 0, spd_data, SPD_SIZE);
		if (ret)
			return MV_DDR3_TRAINING_ERR_TWSI_FAIL;
	}

	/* Check if DDR3 */
	if (spd_data[SPD_DEV_TYPE_BYTE] != SPD_MEM_TYPE_DDR3)
		return MV_DDR3_TRAINING_ERR_TWSI_BAD_TYPE;

	/* Error Check Type */
	/* No byte for error check in DDR3 SPD, use DDR2 convention */
	info->err_check_type = 0;

	/* Check if ECC (SPD byte 8 bits [4:3] = bus width extension) */
	if ((spd_data[SPD_BUS_WIDTH_BYTE] & 0x18) >> 3)
		info->err_check_type = 1;

	DEBUG_INIT_FULL_C("DRAM err_check_type ", info->err_check_type, 1);
	switch (spd_data[SPD_MODULE_TYPE_BYTE]) {
	case 1:
		/* support RDIMM */
		info->type_info = SPD_MODULE_TYPE_RDIMM;
		break;
	case 2:
		/* support UDIMM */
		info->type_info = SPD_MODULE_TYPE_UDIMM;
		break;
	case 11:		/* LRDIMM current not supported */
	default:
		/* Pass the raw SPD module-type code through unchanged */
		info->type_info = (spd_data[SPD_MODULE_TYPE_BYTE]);
		break;
	}

	/* Size Calculations: */

	/* Number Of Row Addresses - 12/13/14/15/16 */
	info->num_of_row_addr =
		(spd_data[SPD_ROW_NUM_BYTE] & SPD_ROW_NUM_MASK) >>
		SPD_ROW_NUM_OFF;
	info->num_of_row_addr += SPD_ROW_NUM_MIN;
	DEBUG_INIT_FULL_C("DRAM num_of_row_addr ", info->num_of_row_addr, 2);

	/* Number Of Column Addresses - 9/10/11/12 */
	info->num_of_col_addr =
		(spd_data[SPD_COL_NUM_BYTE] & SPD_COL_NUM_MASK) >>
		SPD_COL_NUM_OFF;
	info->num_of_col_addr += SPD_COL_NUM_MIN;
	DEBUG_INIT_FULL_C("DRAM num_of_col_addr ", info->num_of_col_addr, 1);

	/* Number Of Ranks = number of CS on Dimm - 1/2/3/4 Ranks */
	info->num_of_module_ranks =
		(spd_data[SPD_MODULE_ORG_BYTE] & SPD_MODULE_BANK_NUM_MASK) >>
		SPD_MODULE_BANK_NUM_OFF;
	info->num_of_module_ranks += SPD_MODULE_BANK_NUM_MIN;
	DEBUG_INIT_FULL_C("DRAM numOfModuleBanks ", info->num_of_module_ranks,
			  1);

	/* Data Width - 8/16/32/64 bits */
	info->data_width =
		1 << (3 + (spd_data[SPD_BUS_WIDTH_BYTE] & SPD_BUS_WIDTH_MASK));
	DEBUG_INIT_FULL_C("DRAM data_width ", info->data_width, 1);

	/* Number Of Banks On Each Device - 8/16/32/64 banks */
	info->num_of_banks_on_each_device =
		1 << (3 + ((spd_data[SPD_DEV_DENSITY_BYTE] >> 4) & 0x7));
	DEBUG_INIT_FULL_C("DRAM num_of_banks_on_each_device ",
			  info->num_of_banks_on_each_device, 1);

	/* Total SDRAM capacity - 256Mb/512Mb/1Gb/2Gb/4Gb/8Gb/16Gb - MegaBits */
	info->sdram_capacity =
		spd_data[SPD_DEV_DENSITY_BYTE] & SPD_DEV_DENSITY_MASK;

	/* Sdram Width - 4/8/16/32 bits */
	info->sdram_width = 1 << (2 + (spd_data[SPD_MODULE_ORG_BYTE] &
				       SPD_MODULE_SDRAM_DEV_WIDTH_MASK));
	DEBUG_INIT_FULL_C("DRAM sdram_width ", info->sdram_width, 1);

	/* CS (Rank) Capacity - MB */
	/*
	 * DDR3 device uiDensity val are: (device capacity/8) *
	 * (Module_width/Device_width)
	 */
	/* Jedec SPD DDR3 - page 7, Save spd_data in Mb - 2048=2GB */
	if (dimm_width == 32) {
		info->rank_capacity =
			((1 << info->sdram_capacity) * 256 *
			 (info->data_width / info->sdram_width)) << 16;
		/* CS size = CS size / 2 */
	} else {
		info->rank_capacity =
			((1 << info->sdram_capacity) * 256 *
			 (info->data_width / info->sdram_width) * 0x2) << 16;
		/* 0x2 => 0x100000-1Mbit / 8-bit->byte / 0x10000 */
	}
	DEBUG_INIT_FULL_C("DRAM rank_capacity[31] ", info->rank_capacity, 1);

	/* Number of devices including Error correction */
	info->num_of_devices =
		((info->data_width / info->sdram_width) *
		 info->num_of_module_ranks) + info->err_check_type;
	DEBUG_INIT_FULL_C("DRAM num_of_devices ", info->num_of_devices, 1);

	/* Address Mapping from Edge connector to DRAM - mirroring option */
	info->addr_mirroring =
		spd_data[SPD_ADDR_MAP_BYTE] & (1 << SPD_ADDR_MAP_MIRROR_OFFS);

	/* Timings - All in ps */

	/* Medium timebase in ps = 1000 * dividend / divisor (SPD bytes 10/11) */
	time_base = (1000 * spd_data[SPD_MTB_DIVIDEND_BYTE]) /
		spd_data[SPD_MTB_DIVISOR_BYTE];

	/* Minimum Cycle Time At Max CasLatancy */
	info->min_cycle_time = spd_data[SPD_TCK_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM tCKmin ", info->min_cycle_time, 1);

	/* Refresh Interval */
	/* No byte for refresh interval in DDR3 SPD, use DDR2 convention */
	/*
	 * JEDEC param are 0 <= Tcase <= 85: 7.8uSec, 85 <= Tcase
	 * <= 95: 3.9uSec
	 */
	info->refresh_interval = 7800000;	/* Set to 7.8uSec */
	DEBUG_INIT_FULL_C("DRAM refresh_interval ", info->refresh_interval, 1);

	/* Supported Cas Latencies - DDR 3: */

	/*
	 * bit7 | bit6 | bit5 | bit4 | bit3 | bit2 | bit1 | bit0 *
	 *******-******-******-******-******-******-******-*******-*******
	 CAS =   11  |  10  |  9   |  8   |  7   |  6   |  5   |  4   *
	 *********************************************************-*******
	 *******-******-******-******-******-******-******-*******-*******
	 * bit15 |bit14 |bit13 |bit12 |bit11 |bit10 | bit9 | bit8 *
	 *******-******-******-******-******-******-******-*******-*******
	 CAS =  TBD  |  18  |  17  |  16  |  15  |  14  |  13  |  12  *
	 */

	/* DDR3 include 2 byte of CAS support */
	info->supported_cas_latencies =
		(spd_data[SPD_SUP_CAS_LAT_MSB_BYTE] << 8) |
		spd_data[SPD_SUP_CAS_LAT_LSB_BYTE];
	DEBUG_INIT_FULL_C("DRAM supported_cas_latencies ",
			  info->supported_cas_latencies, 1);

	/* Minimum Cycle Time At Max CasLatancy */
	info->min_cas_lat_time = (spd_data[SPD_TAA_BYTE] * time_base);
	/*
	 * This field divided by the cycleTime will give us the CAS latency
	 * to config
	 */

	/*
	 * For DDR3 and DDR2 includes Write Recovery Time field.
	 * Other SDRAM ignore
	 */
	info->min_write_recovery_time = spd_data[SPD_TWR_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_write_recovery_time ",
			  info->min_write_recovery_time, 1);

	/* Minimum Ras to Cas Delay */
	info->min_ras_to_cas_delay = spd_data[SPD_TRCD_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_ras_to_cas_delay ",
			  info->min_ras_to_cas_delay, 1);

	/* Minimum Row Active to Row Active Time */
	info->min_row_active_to_row_active =
		spd_data[SPD_TRRD_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_row_active_to_row_active ",
			  info->min_row_active_to_row_active, 1);

	/* Minimum Row Precharge Delay Time */
	info->min_row_precharge_time = spd_data[SPD_TRP_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_row_precharge_time ",
			  info->min_row_precharge_time, 1);

	/* Minimum Active to Precharge Delay Time - tRAS ps (12-bit: MSB+LSB) */
	info->min_active_to_precharge =
		(spd_data[SPD_TRAS_MSB_BYTE] & SPD_TRAS_MSB_MASK) << 8;
	info->min_active_to_precharge |= spd_data[SPD_TRAS_LSB_BYTE];
	info->min_active_to_precharge *= time_base;
	DEBUG_INIT_FULL_C("DRAM min_active_to_precharge ",
			  info->min_active_to_precharge, 1);

	/* Minimum Refresh Recovery Delay Time - tRFC ps (16-bit: MSB+LSB) */
	info->min_refresh_recovery = spd_data[SPD_TRFC_MSB_BYTE] << 8;
	info->min_refresh_recovery |= spd_data[SPD_TRFC_LSB_BYTE];
	info->min_refresh_recovery *= time_base;
	DEBUG_INIT_FULL_C("DRAM min_refresh_recovery ",
			  info->min_refresh_recovery, 1);

	/*
	 * For DDR3 and DDR2 includes Internal Write To Read Command Delay
	 * field.
	 */
	info->min_write_to_read_cmd_delay = spd_data[SPD_TWTR_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_write_to_read_cmd_delay ",
			  info->min_write_to_read_cmd_delay, 1);

	/*
	 * For DDR3 and DDR2 includes Internal Read To Precharge Command Delay
	 * field.
	 */
	info->min_read_to_prech_cmd_delay = spd_data[SPD_TRTP_BYTE] * time_base;
	DEBUG_INIT_FULL_C("DRAM min_read_to_prech_cmd_delay ",
			  info->min_read_to_prech_cmd_delay, 1);

	/*
	 * For DDR3 includes Minimum Activate to Activate/Refresh Command
	 * field
	 */
	tmp = ((spd_data[SPD_TFAW_MSB_BYTE] & SPD_TFAW_MSB_MASK) << 8) |
		spd_data[SPD_TFAW_LSB_BYTE];
	info->min_four_active_win_delay = tmp * time_base;
	DEBUG_INIT_FULL_C("DRAM min_four_active_win_delay ",
			  info->min_four_active_win_delay, 1);

#if defined(MV88F78X60) || defined(MV88F672X)
	/* Registered DIMM support */
	if (info->type_info == SPD_MODULE_TYPE_RDIMM) {
		/* Unpack RC2..RC5 control-word nibbles from the SPD bytes */
		for (rc = 2; rc < 6; rc += 2) {
			/* NOTE(review): this tmp assignment is a dead store -
			 * the value is re-read directly below and tmp is
			 * never used again in this scope.
			 */
			tmp = spd_data[SPD_RDIMM_RC_BYTE + rc / 2];
			info->dimm_rc[rc] =
				spd_data[SPD_RDIMM_RC_BYTE + rc / 2] &
				SPD_RDIMM_RC_NIBBLE_MASK;
			info->dimm_rc[rc + 1] =
				(spd_data[SPD_RDIMM_RC_BYTE + rc / 2] >> 4) &
				SPD_RDIMM_RC_NIBBLE_MASK;
		}

		/* Register-vendor ID from SPD bytes 65/66 */
		vendor_low = spd_data[66];
		vendor_high = spd_data[65];
		info->vendor = (vendor_high << 8) + vendor_low;
		DEBUG_INIT_C("DDR3 Training Sequence - Registered DIMM vendor ID 0x",
			     info->vendor, 4);

		/*
		 * Static control-word values from the board config header.
		 * NOTE(review): dimm_rc[2] just unpacked from SPD above is
		 * overwritten by RDIMM_RC2 here - looks intentional (static
		 * override), but confirm against the board config.
		 */
		info->dimm_rc[0] = RDIMM_RC0;
		info->dimm_rc[1] = RDIMM_RC1;
		info->dimm_rc[2] = RDIMM_RC2;
		info->dimm_rc[8] = RDIMM_RC8;
		info->dimm_rc[9] = RDIMM_RC9;
		info->dimm_rc[10] = RDIMM_RC10;
		info->dimm_rc[11] = RDIMM_RC11;
	}
#endif

	return MV_OK;
}
508
509/*
510 * Name: ddr3_spd_sum_init - Get the SPD parameters.
511 * Desc: Read the DIMM SPD parameters into given struct parameter.
512 * Args: dimmNum - DIMM number. See MV_BOARD_DIMM_NUM enumerator.
513 * info - DIMM information structure.
514 * Notes:
515 * Returns: MV_OK if function could read DIMM parameters, 0 otherwise.
516 */
517int ddr3_spd_sum_init(MV_DIMM_INFO *info, MV_DIMM_INFO *sum_info, u32 dimm)
518{
519 if (dimm == 0) {
520 memcpy(sum_info, info, sizeof(MV_DIMM_INFO));
521 return MV_OK;
522 }
523 if (sum_info->type_info != info->type_info) {
524 DEBUG_INIT_S("DDR3 Dimm Compare - DIMM type does not match - FAIL\n");
525 return MV_DDR3_TRAINING_ERR_DIMM_TYPE_NO_MATCH;
526 }
527 if (sum_info->err_check_type > info->err_check_type) {
528 sum_info->err_check_type = info->err_check_type;
529 DEBUG_INIT_S("DDR3 Dimm Compare - ECC does not match. ECC is disabled\n");
530 }
531 if (sum_info->data_width != info->data_width) {
532 DEBUG_INIT_S("DDR3 Dimm Compare - DRAM bus width does not match - FAIL\n");
533 return MV_DDR3_TRAINING_ERR_BUS_WIDTH_NOT_MATCH;
534 }
535 if (sum_info->min_cycle_time < info->min_cycle_time)
536 sum_info->min_cycle_time = info->min_cycle_time;
537 if (sum_info->refresh_interval < info->refresh_interval)
538 sum_info->refresh_interval = info->refresh_interval;
539 sum_info->supported_cas_latencies &= info->supported_cas_latencies;
540 if (sum_info->min_cas_lat_time < info->min_cas_lat_time)
541 sum_info->min_cas_lat_time = info->min_cas_lat_time;
542 if (sum_info->min_write_recovery_time < info->min_write_recovery_time)
543 sum_info->min_write_recovery_time =
544 info->min_write_recovery_time;
545 if (sum_info->min_ras_to_cas_delay < info->min_ras_to_cas_delay)
546 sum_info->min_ras_to_cas_delay = info->min_ras_to_cas_delay;
547 if (sum_info->min_row_active_to_row_active <
548 info->min_row_active_to_row_active)
549 sum_info->min_row_active_to_row_active =
550 info->min_row_active_to_row_active;
551 if (sum_info->min_row_precharge_time < info->min_row_precharge_time)
552 sum_info->min_row_precharge_time = info->min_row_precharge_time;
553 if (sum_info->min_active_to_precharge < info->min_active_to_precharge)
554 sum_info->min_active_to_precharge =
555 info->min_active_to_precharge;
556 if (sum_info->min_refresh_recovery < info->min_refresh_recovery)
557 sum_info->min_refresh_recovery = info->min_refresh_recovery;
558 if (sum_info->min_write_to_read_cmd_delay <
559 info->min_write_to_read_cmd_delay)
560 sum_info->min_write_to_read_cmd_delay =
561 info->min_write_to_read_cmd_delay;
562 if (sum_info->min_read_to_prech_cmd_delay <
563 info->min_read_to_prech_cmd_delay)
564 sum_info->min_read_to_prech_cmd_delay =
565 info->min_read_to_prech_cmd_delay;
566 if (sum_info->min_four_active_win_delay <
567 info->min_four_active_win_delay)
568 sum_info->min_four_active_win_delay =
569 info->min_four_active_win_delay;
570 if (sum_info->min_write_to_read_cmd_delay <
571 info->min_write_to_read_cmd_delay)
572 sum_info->min_write_to_read_cmd_delay =
573 info->min_write_to_read_cmd_delay;
574
575 return MV_OK;
576}
577
578/*
579 * Name: ddr3_dunit_setup
580 * Desc: Set the controller with the timing values.
581 * Args: ecc_ena - User ECC setup
582 * Notes:
583 * Returns:
584 */
585int ddr3_dunit_setup(u32 ecc_ena, u32 hclk_time, u32 *ddr_width)
586{
587 u32 reg, tmp, cwl;
588 u32 ddr_clk_time;
589 MV_DIMM_INFO dimm_info[2];
590 MV_DIMM_INFO sum_info;
591 u32 stat_val, spd_val;
592 u32 cs, cl, cs_num, cs_ena;
593 u32 dimm_num = 0;
594 int status;
595 u32 rc;
596 __maybe_unused u32 dimm_cnt, cs_count, dimm;
597 __maybe_unused u32 dimm_addr[2] = { 0, 0 };
598
599#if defined(DB_88F6710) || defined(DB_88F6710_PCAC) || defined(RD_88F6710)
600 /* Armada 370 - SPD is not available on DIMM */
601 /*
602 * Set MC registers according to Static SPD values Values -
603 * must be set manually
604 */
605 /*
606 * We only have one optional DIMM for the DB and we already got the
607 * SPD matching values
608 */
609 status = ddr3_spd_init(&dimm_info[0], 0, *ddr_width);
610 if (MV_OK != status)
611 return status;
612
613 dimm_num = 1;
614 /* Use JP8 to enable multiCS support for Armada 370 DB */
615 if (!ddr3_check_config(EEPROM_MODULE_ADDR, CONFIG_MULTI_CS))
616 dimm_info[0].num_of_module_ranks = 1;
617 status = ddr3_spd_sum_init(&dimm_info[0], &sum_info, 0);
618 if (MV_OK != status)
619 return status;
620#else
621 /* Dynamic D-Unit Setup - Read SPD values */
622#ifdef DUNIT_SPD
623 dimm_num = ddr3_get_dimm_num(dimm_addr);
624 if (dimm_num == 0) {
625#ifdef MIXED_DIMM_STATIC
626 DEBUG_INIT_S("DDR3 Training Sequence - No DIMMs detected\n");
627#else
628 DEBUG_INIT_S("DDR3 Training Sequence - FAILED (Wrong DIMMs Setup)\n");
629 return MV_DDR3_TRAINING_ERR_BAD_DIMM_SETUP;
630#endif
631 } else {
632 DEBUG_INIT_C("DDR3 Training Sequence - Number of DIMMs detected: ",
633 dimm_num, 1);
634 }
635
636 for (dimm = 0; dimm < dimm_num; dimm++) {
637 status = ddr3_spd_init(&dimm_info[dimm], dimm_addr[dimm],
638 *ddr_width);
639 if (MV_OK != status)
640 return status;
641 status = ddr3_spd_sum_init(&dimm_info[dimm], &sum_info, dimm);
642 if (MV_OK != status)
643 return status;
644 }
645#endif
646#endif
647
648 /* Set number of enabled CS */
649 cs_num = 0;
650#ifdef DUNIT_STATIC
651 cs_num = ddr3_get_cs_num_from_reg();
652#endif
653#ifdef DUNIT_SPD
654 for (dimm = 0; dimm < dimm_num; dimm++)
655 cs_num += dimm_info[dimm].num_of_module_ranks;
656#endif
657 if (cs_num > MAX_CS) {
658 DEBUG_INIT_C("DDR3 Training Sequence - Number of CS exceed limit - ",
659 MAX_CS, 1);
660 return MV_DDR3_TRAINING_ERR_MAX_CS_LIMIT;
661 }
662
663 /* Set bitmap of enabled CS */
664 cs_ena = 0;
665#ifdef DUNIT_STATIC
666 cs_ena = ddr3_get_cs_ena_from_reg();
667#endif
668#ifdef DUNIT_SPD
669 dimm = 0;
670
671 if (dimm_num) {
672 for (cs = 0; cs < MAX_CS; cs += 2) {
673 if (((1 << cs) & DIMM_CS_BITMAP) &&
674 !(cs_ena & (1 << cs))) {
675 if (dimm_info[dimm].num_of_module_ranks == 1)
676 cs_ena |= (0x1 << cs);
677 else if (dimm_info[dimm].num_of_module_ranks == 2)
678 cs_ena |= (0x3 << cs);
679 else if (dimm_info[dimm].num_of_module_ranks == 3)
680 cs_ena |= (0x7 << cs);
681 else if (dimm_info[dimm].num_of_module_ranks == 4)
682 cs_ena |= (0xF << cs);
683
684 dimm++;
685 if (dimm == dimm_num)
686 break;
687 }
688 }
689 }
690#endif
691
692 if (cs_ena > 0xF) {
693 DEBUG_INIT_C("DDR3 Training Sequence - Number of enabled CS exceed limit - ",
694 MAX_CS, 1);
695 return MV_DDR3_TRAINING_ERR_MAX_ENA_CS_LIMIT;
696 }
697
698 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Number of CS = ", cs_num, 1);
699
700 /* Check Ratio - '1' - 2:1, '0' - 1:1 */
701 if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
702 ddr_clk_time = hclk_time / 2;
703 else
704 ddr_clk_time = hclk_time;
705
706#ifdef DUNIT_STATIC
707 /* Get target CL value from set register */
708 reg = (reg_read(REG_DDR3_MR0_ADDR) >> 2);
709 reg = ((((reg >> 1) & 0xE)) | (reg & 0x1)) & 0xF;
710
711 cl = ddr3_get_max_val(ddr3_div(sum_info.min_cas_lat_time,
712 ddr_clk_time, 0),
713 dimm_num, ddr3_valid_cl_to_cl(reg));
714#else
715 cl = ddr3_div(sum_info.min_cas_lat_time, ddr_clk_time, 0);
716#endif
717 if (cl < 5)
718 cl = 5;
719
720 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - Cas Latency = ", cl, 1);
721
722 /* {0x00001400} - DDR SDRAM Configuration Register */
723 reg = 0x73004000;
724 stat_val = ddr3_get_static_mc_value(
725 REG_SDRAM_CONFIG_ADDR, REG_SDRAM_CONFIG_ECC_OFFS, 0x1, 0, 0);
726 if (ecc_ena && ddr3_get_min_val(sum_info.err_check_type, dimm_num,
727 stat_val)) {
728 reg |= (1 << REG_SDRAM_CONFIG_ECC_OFFS);
729 reg |= (1 << REG_SDRAM_CONFIG_IERR_OFFS);
730 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Enabled\n");
731 } else {
732 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - ECC Disabled\n");
733 }
734
735 if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
736#ifdef DUNIT_STATIC
737 DEBUG_INIT_S("DDR3 Training Sequence - FAIL - Illegal R-DIMM setup\n");
738 return MV_DDR3_TRAINING_ERR_BAD_R_DIMM_SETUP;
739#endif
740 reg |= (1 << REG_SDRAM_CONFIG_REGDIMM_OFFS);
741 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - R-DIMM\n");
742 } else {
743 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - U-DIMM\n");
744 }
745
746#ifndef MV88F67XX
747#ifdef DUNIT_STATIC
748 if (ddr3_get_min_val(sum_info.data_width, dimm_num, BUS_WIDTH) == 64) {
749#else
750 if (*ddr_width == 64) {
751#endif
752 reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
753 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 64Bits\n");
754 } else {
755 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
756 }
757#else
758 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
759#endif
760
761#if defined(MV88F672X)
762 if (*ddr_width == 32) {
763 reg |= (1 << REG_SDRAM_CONFIG_WIDTH_OFFS);
764 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 32Bits\n");
765 } else {
766 DEBUG_INIT_FULL_S("DDR3 - DUNIT-SET - Datawidth - 16Bits\n");
767 }
768#endif
769 stat_val = ddr3_get_static_mc_value(REG_SDRAM_CONFIG_ADDR, 0,
770 REG_SDRAM_CONFIG_RFRS_MASK, 0, 0);
771 tmp = ddr3_get_min_val(sum_info.refresh_interval / hclk_time,
772 dimm_num, stat_val);
773
774#ifdef TREFI_USER_EN
775 tmp = min(TREFI_USER / hclk_time, tmp);
776#endif
777
778 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - RefreshInterval/Hclk = ", tmp, 4);
779 reg |= tmp;
780
781 if (cl != 3)
782 reg |= (1 << 16); /* If 2:1 need to set P2DWr */
783
784#if defined(MV88F672X)
785 reg |= (1 << 27); /* PhyRfRST = Disable */
786#endif
787 reg_write(REG_SDRAM_CONFIG_ADDR, reg);
788
789 /*{0x00001404} - DDR SDRAM Configuration Register */
790 reg = 0x3630B800;
791#ifdef DUNIT_SPD
792 reg |= (DRAM_2T << REG_DUNIT_CTRL_LOW_2T_OFFS);
793#endif
794 reg_write(REG_DUNIT_CTRL_LOW_ADDR, reg);
795
796 /* {0x00001408} - DDR SDRAM Timing (Low) Register */
797 reg = 0x0;
798
799 /* tRAS - (0:3,20) */
800 spd_val = ddr3_div(sum_info.min_active_to_precharge,
801 ddr_clk_time, 1);
802 stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
803 0, 0xF, 16, 0x10);
804 tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
805 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRAS-1 = ", tmp, 1);
806 reg |= (tmp & 0xF);
807 reg |= ((tmp & 0x10) << 16); /* to bit 20 */
808
809 /* tRCD - (4:7) */
810 spd_val = ddr3_div(sum_info.min_ras_to_cas_delay, ddr_clk_time, 1);
811 stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
812 4, 0xF, 0, 0);
813 tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
814 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRCD-1 = ", tmp, 1);
815 reg |= ((tmp & 0xF) << 4);
816
817 /* tRP - (8:11) */
818 spd_val = ddr3_div(sum_info.min_row_precharge_time, ddr_clk_time, 1);
819 stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
820 8, 0xF, 0, 0);
821 tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
822 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRP-1 = ", tmp, 1);
823 reg |= ((tmp & 0xF) << 8);
824
825 /* tWR - (12:15) */
826 spd_val = ddr3_div(sum_info.min_write_recovery_time, ddr_clk_time, 1);
827 stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
828 12, 0xF, 0, 0);
829 tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
830 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWR-1 = ", tmp, 1);
831 reg |= ((tmp & 0xF) << 12);
832
833 /* tWTR - (16:19) */
834 spd_val = ddr3_div(sum_info.min_write_to_read_cmd_delay, ddr_clk_time, 1);
835 stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
836 16, 0xF, 0, 0);
837 tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
838 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tWTR-1 = ", tmp, 1);
839 reg |= ((tmp & 0xF) << 16);
840
841 /* tRRD - (24:27) */
842 spd_val = ddr3_div(sum_info.min_row_active_to_row_active, ddr_clk_time, 1);
843 stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
844 24, 0xF, 0, 0);
845 tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
846 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRRD-1 = ", tmp, 1);
847 reg |= ((tmp & 0xF) << 24);
848
849 /* tRTP - (28:31) */
850 spd_val = ddr3_div(sum_info.min_read_to_prech_cmd_delay, ddr_clk_time, 1);
851 stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_LOW_ADDR,
852 28, 0xF, 0, 0);
853 tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
854 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRTP-1 = ", tmp, 1);
855 reg |= ((tmp & 0xF) << 28);
856
857 if (cl < 7)
858 reg = 0x33137663;
859
860 reg_write(REG_SDRAM_TIMING_LOW_ADDR, reg);
861
862 /*{0x0000140C} - DDR SDRAM Timing (High) Register */
863 /* Add cycles to R2R W2W */
864 reg = 0x39F8FF80;
865
866 /* tRFC - (0:6,16:18) */
867 spd_val = ddr3_div(sum_info.min_refresh_recovery, ddr_clk_time, 1);
868 stat_val = ddr3_get_static_mc_value(REG_SDRAM_TIMING_HIGH_ADDR,
869 0, 0x7F, 9, 0x380);
870 tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
871 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tRFC-1 = ", tmp, 1);
872 reg |= (tmp & 0x7F);
873 reg |= ((tmp & 0x380) << 9); /* to bit 16 */
874 reg_write(REG_SDRAM_TIMING_HIGH_ADDR, reg);
875
876 /*{0x00001410} - DDR SDRAM Address Control Register */
877 reg = 0x000F0000;
878
879 /* tFAW - (24:28) */
880#if (defined(MV88F78X60) || defined(MV88F672X))
881 tmp = sum_info.min_four_active_win_delay;
882 spd_val = ddr3_div(tmp, ddr_clk_time, 0);
883 stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
884 24, 0x3F, 0, 0);
885 tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
886 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW = ", tmp, 1);
887 reg |= ((tmp & 0x3F) << 24);
888#else
889 tmp = sum_info.min_four_active_win_delay -
890 4 * (sum_info.min_row_active_to_row_active);
891 spd_val = ddr3_div(tmp, ddr_clk_time, 0);
892 stat_val = ddr3_get_static_mc_value(REG_SDRAM_ADDRESS_CTRL_ADDR,
893 24, 0x1F, 0, 0);
894 tmp = ddr3_get_max_val(spd_val, dimm_num, stat_val);
895 DEBUG_INIT_FULL_C("DDR3 - DUNIT-SET - tFAW-4*tRRD = ", tmp, 1);
896 reg |= ((tmp & 0x1F) << 24);
897#endif
898
899 /* SDRAM device capacity */
900#ifdef DUNIT_STATIC
901 reg |= (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) & 0xF0FFFF);
902#endif
903
904#ifdef DUNIT_SPD
905 cs_count = 0;
906 dimm_cnt = 0;
907 for (cs = 0; cs < MAX_CS; cs++) {
908 if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
909 if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
910 dimm_cnt++;
911 cs_count = 0;
912 }
913 cs_count++;
914 if (dimm_info[dimm_cnt].sdram_capacity < 0x3) {
915 reg |= ((dimm_info[dimm_cnt].sdram_capacity + 1) <<
916 (REG_SDRAM_ADDRESS_SIZE_OFFS +
917 (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
918 } else if (dimm_info[dimm_cnt].sdram_capacity > 0x3) {
919 reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x3) <<
920 (REG_SDRAM_ADDRESS_SIZE_OFFS +
921 (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs)));
922 reg |= ((dimm_info[dimm_cnt].sdram_capacity & 0x4) <<
923 (REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs));
924 }
925 }
926 }
927
928 /* SDRAM device structure */
929 cs_count = 0;
930 dimm_cnt = 0;
931 for (cs = 0; cs < MAX_CS; cs++) {
932 if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
933 if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
934 dimm_cnt++;
935 cs_count = 0;
936 }
937 cs_count++;
938 if (dimm_info[dimm_cnt].sdram_width == 16)
939 reg |= (1 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs));
940 }
941 }
942#endif
943 reg_write(REG_SDRAM_ADDRESS_CTRL_ADDR, reg);
944
945 /*{0x00001418} - DDR SDRAM Operation Register */
946 reg = 0xF00;
947 for (cs = 0; cs < MAX_CS; cs++) {
948 if (cs_ena & (1 << cs))
949 reg &= ~(1 << (cs + REG_SDRAM_OPERATION_CS_OFFS));
950 }
951 reg_write(REG_SDRAM_OPERATION_ADDR, reg);
952
953 /*{0x00001420} - DDR SDRAM Extended Mode Register */
954 reg = 0x00000004;
955 reg_write(REG_SDRAM_EXT_MODE_ADDR, reg);
956
957 /*{0x00001424} - DDR Controller Control (High) Register */
958#if (defined(MV88F78X60) || defined(MV88F672X))
959 reg = 0x0000D3FF;
960#else
961 reg = 0x0100D1FF;
962#endif
963 reg_write(REG_DDR_CONT_HIGH_ADDR, reg);
964
965 /*{0x0000142C} - DDR3 Timing Register */
966 reg = 0x014C2F38;
967#if defined(MV88F78X60) || defined(MV88F672X)
968 reg = 0x1FEC2F38;
969#endif
970 reg_write(0x142C, reg);
971
972 /*{0x00001484} - MBus CPU Block Register */
973#ifdef MV88F67XX
974 if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
975 reg_write(REG_MBUS_CPU_BLOCK_ADDR, 0x0000E907);
976#endif
977
978 /*
 * In case of mixed DIMM and on-board devices, setup parameters will
 * be taken statically
981 */
982 /*{0x00001494} - DDR SDRAM ODT Control (Low) Register */
983 reg = odt_config[cs_ena];
984 reg_write(REG_SDRAM_ODT_CTRL_LOW_ADDR, reg);
985
986 /*{0x00001498} - DDR SDRAM ODT Control (High) Register */
987 reg = 0x00000000;
988 reg_write(REG_SDRAM_ODT_CTRL_HIGH_ADDR, reg);
989
990 /*{0x0000149C} - DDR Dunit ODT Control Register */
991 reg = cs_ena;
992 reg_write(REG_DUNIT_ODT_CTRL_ADDR, reg);
993
994 /*{0x000014A0} - DDR Dunit ODT Control Register */
995#if defined(MV88F672X)
996 reg = 0x000006A9;
997 reg_write(REG_DRAM_FIFO_CTRL_ADDR, reg);
998#endif
999
	/*{0x000014C0} - DRAM address and Control Driving Strength */
1001 reg_write(REG_DRAM_ADDR_CTRL_DRIVE_STRENGTH_ADDR, 0x192435e9);
1002
	/*{0x000014C4} - DRAM Data and DQS Driving Strength */
1004 reg_write(REG_DRAM_DATA_DQS_DRIVE_STRENGTH_ADDR, 0xB2C35E9);
1005
1006#if (defined(MV88F78X60) || defined(MV88F672X))
1007 /*{0x000014CC} - DRAM Main Pads Calibration Machine Control Register */
1008 reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
1009 reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg | (1 << 0));
1010#endif
1011
1012#if defined(MV88F672X)
1013 /* DRAM Main Pads Calibration Machine Control Register */
1014 /* 0x14CC[4:3] - CalUpdateControl = IntOnly */
1015 reg = reg_read(REG_DRAM_MAIN_PADS_CAL_ADDR);
1016 reg &= 0xFFFFFFE7;
1017 reg |= (1 << 3);
1018 reg_write(REG_DRAM_MAIN_PADS_CAL_ADDR, reg);
1019#endif
1020
1021#ifdef DUNIT_SPD
1022 cs_count = 0;
1023 dimm_cnt = 0;
1024 for (cs = 0; cs < MAX_CS; cs++) {
1025 if ((1 << cs) & DIMM_CS_BITMAP) {
1026 if ((1 << cs) & cs_ena) {
1027 if (dimm_info[dimm_cnt].num_of_module_ranks ==
1028 cs_count) {
1029 dimm_cnt++;
1030 cs_count = 0;
1031 }
1032 cs_count++;
1033 reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8),
1034 dimm_info[dimm_cnt].rank_capacity - 1);
1035 } else {
1036 reg_write(REG_CS_SIZE_SCRATCH_ADDR + (cs * 0x8), 0);
1037 }
1038 }
1039 }
1040#endif
1041
1042 /*{0x00020184} - Close FastPath - 2G */
1043 reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, 0);
1044
1045 /*{0x00001538} - Read Data Sample Delays Register */
1046 reg = 0;
1047 for (cs = 0; cs < MAX_CS; cs++) {
1048 if (cs_ena & (1 << cs))
1049 reg |= (cl << (REG_READ_DATA_SAMPLE_DELAYS_OFFS * cs));
1050 }
1051
1052 reg_write(REG_READ_DATA_SAMPLE_DELAYS_ADDR, reg);
1053 DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Sample Delays = ", reg,
1054 1);
1055
1056 /*{0x0000153C} - Read Data Ready Delay Register */
1057 reg = 0;
1058 for (cs = 0; cs < MAX_CS; cs++) {
1059 if (cs_ena & (1 << cs)) {
1060 reg |= ((cl + 2) <<
1061 (REG_READ_DATA_READY_DELAYS_OFFS * cs));
1062 }
1063 }
1064 reg_write(REG_READ_DATA_READY_DELAYS_ADDR, reg);
1065 DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Read Data Ready Delays = ", reg, 1);
1066
1067 /* Set MR registers */
1068 /* MR0 */
1069 reg = 0x00000600;
1070 tmp = ddr3_cl_to_valid_cl(cl);
1071 reg |= ((tmp & 0x1) << 2);
1072 reg |= ((tmp & 0xE) << 3); /* to bit 4 */
1073 for (cs = 0; cs < MAX_CS; cs++) {
1074 if (cs_ena & (1 << cs)) {
1075 reg_write(REG_DDR3_MR0_CS_ADDR +
1076 (cs << MR_CS_ADDR_OFFS), reg);
1077 }
1078 }
1079
1080 /* MR1 */
1081 reg = 0x00000044 & REG_DDR3_MR1_ODT_MASK;
1082 if (cs_num > 1)
1083 reg = 0x00000046 & REG_DDR3_MR1_ODT_MASK;
1084
1085 for (cs = 0; cs < MAX_CS; cs++) {
1086 if (cs_ena & (1 << cs)) {
1087 reg |= odt_static[cs_ena][cs];
1088 reg_write(REG_DDR3_MR1_CS_ADDR +
1089 (cs << MR_CS_ADDR_OFFS), reg);
1090 }
1091 }
1092
1093 /* MR2 */
1094 if (reg_read(REG_DDR_IO_ADDR) & (1 << REG_DDR_IO_CLK_RATIO_OFFS))
1095 tmp = hclk_time / 2;
1096 else
1097 tmp = hclk_time;
1098
1099 if (tmp >= 2500)
1100 cwl = 5; /* CWL = 5 */
1101 else if (tmp >= 1875 && tmp < 2500)
1102 cwl = 6; /* CWL = 6 */
1103 else if (tmp >= 1500 && tmp < 1875)
1104 cwl = 7; /* CWL = 7 */
1105 else if (tmp >= 1250 && tmp < 1500)
1106 cwl = 8; /* CWL = 8 */
1107 else if (tmp >= 1070 && tmp < 1250)
1108 cwl = 9; /* CWL = 9 */
1109 else if (tmp >= 935 && tmp < 1070)
1110 cwl = 10; /* CWL = 10 */
1111 else if (tmp >= 833 && tmp < 935)
1112 cwl = 11; /* CWL = 11 */
1113 else if (tmp >= 750 && tmp < 833)
1114 cwl = 12; /* CWL = 12 */
1115 else {
1116 cwl = 12; /* CWL = 12 */
1117 printf("Unsupported hclk %d MHz\n", tmp);
1118 }
1119
1120 reg = ((cwl - 5) << REG_DDR3_MR2_CWL_OFFS);
1121
1122 for (cs = 0; cs < MAX_CS; cs++) {
1123 if (cs_ena & (1 << cs)) {
1124 reg &= REG_DDR3_MR2_ODT_MASK;
1125 reg |= odt_dynamic[cs_ena][cs];
1126 reg_write(REG_DDR3_MR2_CS_ADDR +
1127 (cs << MR_CS_ADDR_OFFS), reg);
1128 }
1129 }
1130
1131 /* MR3 */
1132 reg = 0x00000000;
1133 for (cs = 0; cs < MAX_CS; cs++) {
1134 if (cs_ena & (1 << cs)) {
1135 reg_write(REG_DDR3_MR3_CS_ADDR +
1136 (cs << MR_CS_ADDR_OFFS), reg);
1137 }
1138 }
1139
1140 /* {0x00001428} - DDR ODT Timing (Low) Register */
1141 reg = 0;
1142 reg |= (((cl - cwl + 1) & 0xF) << 4);
1143 reg |= (((cl - cwl + 6) & 0xF) << 8);
1144 reg |= ((((cl - cwl + 6) >> 4) & 0x1) << 21);
1145 reg |= (((cl - 1) & 0xF) << 12);
1146 reg |= (((cl + 6) & 0x1F) << 16);
1147 reg_write(REG_ODT_TIME_LOW_ADDR, reg);
1148
1149 /* {0x0000147C} - DDR ODT Timing (High) Register */
1150 reg = 0x00000071;
1151 reg |= ((cwl - 1) << 8);
1152 reg |= ((cwl + 5) << 12);
1153 reg_write(REG_ODT_TIME_HIGH_ADDR, reg);
1154
1155#ifdef DUNIT_SPD
1156 /*{0x000015E0} - DDR3 Rank Control Register */
1157 reg = cs_ena;
1158 cs_count = 0;
1159 dimm_cnt = 0;
1160 for (cs = 0; cs < MAX_CS; cs++) {
1161 if (cs_ena & (1 << cs) & DIMM_CS_BITMAP) {
1162 if (dimm_info[dimm_cnt].num_of_module_ranks == cs_count) {
1163 dimm_cnt++;
1164 cs_count = 0;
1165 }
1166 cs_count++;
1167
1168 if (dimm_info[dimm_cnt].addr_mirroring &&
1169 (cs == 1 || cs == 3) &&
1170 (sum_info.type_info != SPD_MODULE_TYPE_RDIMM)) {
1171 reg |= (1 << (REG_DDR3_RANK_CTRL_MIRROR_OFFS + cs));
1172 DEBUG_INIT_FULL_C("DDR3 - SPD-SET - Setting Address Mirroring for CS = ",
1173 cs, 1);
1174 }
1175 }
1176 }
1177 reg_write(REG_DDR3_RANK_CTRL_ADDR, reg);
1178#endif
1179
1180 /*{0xD00015E4} - ZQDS Configuration Register */
1181 reg = 0x00203c18;
1182 reg_write(REG_ZQC_CONF_ADDR, reg);
1183
1184 /* {0x00015EC} - DDR PHY */
1185#if defined(MV88F78X60)
1186 reg = 0xF800AAA5;
1187 if (mv_ctrl_rev_get() == MV_78XX0_B0_REV)
1188 reg = 0xF800A225;
1189#else
1190 reg = 0xDE000025;
1191#if defined(MV88F672X)
1192 reg = 0xF800A225;
1193#endif
1194#endif
1195 reg_write(REG_DRAM_PHY_CONFIG_ADDR, reg);
1196
1197#if (defined(MV88F78X60) || defined(MV88F672X))
1198 /* Registered DIMM support - supported only in AXP A0 devices */
1199 /* Currently supported for SPD detection only */
1200 /*
1201 * Flow is according to the Registered DIMM chapter in the
1202 * Functional Spec
1203 */
1204 if (sum_info.type_info == SPD_MODULE_TYPE_RDIMM) {
1205 DEBUG_INIT_S("DDR3 Training Sequence - Registered DIMM detected\n");
1206
1207 /* Set commands parity completion */
1208 reg = reg_read(REG_REGISTERED_DRAM_CTRL_ADDR);
1209 reg &= ~REG_REGISTERED_DRAM_CTRL_PARITY_MASK;
1210 reg |= 0x8;
1211 reg_write(REG_REGISTERED_DRAM_CTRL_ADDR, reg);
1212
1213 /* De-assert M_RESETn and assert M_CKE */
1214 reg_write(REG_SDRAM_INIT_CTRL_ADDR,
1215 1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
1216 do {
1217 reg = (reg_read(REG_SDRAM_INIT_CTRL_ADDR)) &
1218 (1 << REG_SDRAM_INIT_CKE_ASSERT_OFFS);
1219 } while (reg);
1220
1221 for (rc = 0; rc < SPD_RDIMM_RC_NUM; rc++) {
1222 if (rc != 6 && rc != 7) {
1223 /* Set CWA Command */
1224 reg = (REG_SDRAM_OPERATION_CMD_CWA &
1225 ~(0xF << REG_SDRAM_OPERATION_CS_OFFS));
1226 reg |= ((dimm_info[0].dimm_rc[rc] &
1227 REG_SDRAM_OPERATION_CWA_DATA_MASK) <<
1228 REG_SDRAM_OPERATION_CWA_DATA_OFFS);
1229 reg |= rc << REG_SDRAM_OPERATION_CWA_RC_OFFS;
1230 /* Configure - Set Delay - tSTAB/tMRD */
1231 if (rc == 2 || rc == 10)
1232 reg |= (0x1 << REG_SDRAM_OPERATION_CWA_DELAY_SEL_OFFS);
1233 /* 0x1418 - SDRAM Operation Register */
1234 reg_write(REG_SDRAM_OPERATION_ADDR, reg);
1235
1236 /*
1237 * Poll the "cmd" field in the SDRAM OP
1238 * register for 0x0
1239 */
1240 do {
1241 reg = reg_read(REG_SDRAM_OPERATION_ADDR) &
1242 (REG_SDRAM_OPERATION_CMD_MASK);
1243 } while (reg);
1244 }
1245 }
1246 }
1247#endif
1248
1249 return MV_OK;
1250}
1251
1252/*
1253 * Name: ddr3_div - this function divides integers
1254 * Desc:
1255 * Args: val - the value
1256 * divider - the divider
1257 * sub - substruction value
1258 * Notes:
1259 * Returns: required value
1260 */
1261u32 ddr3_div(u32 val, u32 divider, u32 sub)
1262{
1263 return val / divider + (val % divider > 0 ? 1 : 0) - sub;
1264}
1265
1266/*
1267 * Name: ddr3_get_max_val
1268 * Desc:
1269 * Args:
1270 * Notes:
1271 * Returns:
1272 */
1273u32 ddr3_get_max_val(u32 spd_val, u32 dimm_num, u32 static_val)
1274{
1275#ifdef DUNIT_STATIC
1276 if (dimm_num > 0) {
1277 if (spd_val >= static_val)
1278 return spd_val;
1279 else
1280 return static_val;
1281 } else {
1282 return static_val;
1283 }
1284#else
1285 return spd_val;
1286#endif
1287}
1288
1289/*
1290 * Name: ddr3_get_min_val
1291 * Desc:
1292 * Args:
1293 * Notes:
1294 * Returns:
1295 */
1296u32 ddr3_get_min_val(u32 spd_val, u32 dimm_num, u32 static_val)
1297{
1298#ifdef DUNIT_STATIC
1299 if (dimm_num > 0) {
1300 if (spd_val <= static_val)
1301 return spd_val;
1302 else
1303 return static_val;
1304 } else
1305 return static_val;
1306#else
1307 return spd_val;
1308#endif
1309}
1310#endif