/*
 * Copyright 2021 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include <common/debug.h>
#include <ddr.h>
#ifndef CONFIG_DDR_NODIMM
#include <i2c.h>
#endif
#include <nxp_timer.h>

struct dynamic_odt {
	unsigned int odt_rd_cfg;
	unsigned int odt_wr_cfg;
	unsigned int odt_rtt_norm;
	unsigned int odt_rtt_wr;
};

#ifndef CONFIG_STATIC_DDR
#if defined(PHY_GEN2_FW_IMAGE_BUFFER) && !defined(NXP_DDR_PHY_GEN2)
#error Missing NXP_DDR_PHY_GEN2
#endif
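
/*
 * Per-chip-select ODT tables, indexed by CS number: "single"/"dual" is
 * the number of DIMM slots on the controller, "S"/"D" single- or
 * dual-rank modules, and "0" an empty second slot. The termination
 * values differ between the GEN2 PHY and earlier PHYs, hence the two
 * variants below.
 */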
#ifdef NXP_DDR_PHY_GEN2
static const struct dynamic_odt single_D[4] = {
	{	/* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{	/* cs1 */
		DDR_ODT_NEVER,
		DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_WR_OFF
	},
	{},
	{}
};

static const struct dynamic_odt single_S[4] = {
	{	/* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{},
	{},
};

static const struct dynamic_odt dual_DD[4] = {
	{	/* cs0 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_60_OHM,
		DDR4_RTT_WR_240_OHM
	},
	{	/* cs1 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_60_OHM,
		DDR4_RTT_WR_240_OHM
	},
	{	/* cs2 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_60_OHM,
		DDR4_RTT_WR_240_OHM
	},
	{	/* cs3 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_60_OHM,
		DDR4_RTT_WR_240_OHM
	}
};

static const struct dynamic_odt dual_SS[4] = {
	{	/* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{	/* cs2 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{}
};

static const struct dynamic_odt dual_D0[4] = {
	{	/* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_SAME_DIMM,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{	/* cs1 */
		DDR_ODT_NEVER,
		DDR_ODT_NEVER,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{}
};

static const struct dynamic_odt dual_S0[4] = {
	{	/* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_CS,
		DDR4_RTT_80_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{},
	{}
};
#else
static const struct dynamic_odt single_D[4] = {
	{	/* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_40_OHM,
		DDR4_RTT_WR_OFF
	},
	{	/* cs1 */
		DDR_ODT_NEVER,
		DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_WR_OFF
	},
	{},
	{}
};

static const struct dynamic_odt single_S[4] = {
	{	/* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_ALL,
		DDR4_RTT_40_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{},
	{},
};

static const struct dynamic_odt dual_DD[4] = {
	{	/* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_WR_OFF
	},
	{	/* cs1 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_WR_OFF
	},
	{	/* cs2 */
		DDR_ODT_NEVER,
		DDR_ODT_SAME_DIMM,
		DDR4_RTT_120_OHM,
		DDR4_RTT_WR_OFF
	},
	{	/* cs3 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_OTHER_DIMM,
		DDR4_RTT_34_OHM,
		DDR4_RTT_WR_OFF
	}
};

static const struct dynamic_odt dual_SS[4] = {
	{	/* cs0 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_WR_120_OHM
	},
	{},
	{	/* cs2 */
		DDR_ODT_OTHER_DIMM,
		DDR_ODT_ALL,
		DDR4_RTT_34_OHM,
		DDR4_RTT_WR_120_OHM
	},
	{}
};

static const struct dynamic_odt dual_D0[4] = {
	{	/* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_SAME_DIMM,
		DDR4_RTT_40_OHM,
		DDR4_RTT_WR_OFF
	},
	{	/* cs1 */
		DDR_ODT_NEVER,
		DDR_ODT_NEVER,
		DDR4_RTT_OFF,
		DDR4_RTT_WR_OFF
	},
	{},
	{}
};

static const struct dynamic_odt dual_S0[4] = {
	{	/* cs0 */
		DDR_ODT_NEVER,
		DDR_ODT_CS,
		DDR4_RTT_40_OHM,
		DDR4_RTT_WR_OFF
	},
	{},
	{},
	{}
};
#endif /* NXP_DDR_PHY_GEN2 */

/*
 * Automatically select bank interleaving mode based on DIMMs
 * in this order: cs0_cs1_cs2_cs3, cs0_cs1, null.
 * This function only deals with one or two slots per controller.
 */
static inline unsigned int auto_bank_intlv(const int cs_in_use,
					   const struct dimm_params *pdimm)
{
	switch (cs_in_use) {
	case 0xf:
		return DDR_BA_INTLV_CS0123;
	case 0x3:
		return DDR_BA_INTLV_CS01;
	case 0x1:
		return DDR_BA_NONE;
	case 0x5:
		return DDR_BA_NONE;
	default:
		break;
	}

	return 0U;
}
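
/*
 * A note on cs_in_use (a sketch, assuming CONFIG_CS_PER_SLOT is 2, as
 * the 0x5/0xf checks below imply): each slot owns two consecutive chip
 * selects, so 0xf means two dual-rank DIMMs, 0x3 one dual-rank DIMM in
 * the first slot, 0x5 a single-rank DIMM in each slot, and 0x1 a lone
 * single-rank DIMM. cal_odt() combines this mask with the slot and
 * rank counts to pick one of the ODT tables above.
 */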

static int cal_odt(const unsigned int clk,
		   struct memctl_opt *popts,
		   struct ddr_conf *conf,
		   struct dimm_params *pdimm,
		   const int dimm_slot_per_ctrl)
{
	unsigned int i;
	const struct dynamic_odt *pdodt = NULL;

	static const struct dynamic_odt *table[2][5] = {
		{single_S, single_D, NULL, NULL},
		{dual_SS, dual_DD, NULL, NULL},
	};

	if (dimm_slot_per_ctrl != 1 && dimm_slot_per_ctrl != 2) {
		ERROR("Unsupported number of DIMMs\n");
		return -EINVAL;
	}

	pdodt = table[dimm_slot_per_ctrl - 1][pdimm->n_ranks - 1];
	if (pdodt == dual_SS) {
		pdodt = (conf->cs_in_use == 0x5) ? dual_SS :
			((conf->cs_in_use == 0x1) ? dual_S0 : NULL);
	} else if (pdodt == dual_DD) {
		pdodt = (conf->cs_in_use == 0xf) ? dual_DD :
			((conf->cs_in_use == 0x3) ? dual_D0 : NULL);
	}
	if (pdodt == dual_DD && pdimm->package_3ds) {
		ERROR("Too many 3DS DIMMs.\n");
		return -EINVAL;
	}

	if (pdodt == NULL) {
		ERROR("Error determining ODT.\n");
		return -EINVAL;
	}

	/* Pick chip-select local options. */
	for (i = 0U; i < DDRC_NUM_CS; i++) {
		debug("cs %d\n", i);
		popts->cs_odt[i].odt_rd_cfg = pdodt[i].odt_rd_cfg;
		debug("     odt_rd_cfg 0x%x\n",
		      popts->cs_odt[i].odt_rd_cfg);
		popts->cs_odt[i].odt_wr_cfg = pdodt[i].odt_wr_cfg;
		debug("     odt_wr_cfg 0x%x\n",
		      popts->cs_odt[i].odt_wr_cfg);
		popts->cs_odt[i].odt_rtt_norm = pdodt[i].odt_rtt_norm;
		debug("     odt_rtt_norm 0x%x\n",
		      popts->cs_odt[i].odt_rtt_norm);
		popts->cs_odt[i].odt_rtt_wr = pdodt[i].odt_rtt_wr;
		debug("     odt_rtt_wr 0x%x\n",
		      popts->cs_odt[i].odt_rtt_wr);
		popts->cs_odt[i].auto_precharge = 0;
		debug("     auto_precharge %d\n",
		      popts->cs_odt[i].auto_precharge);
	}

	return 0;
}

static int cal_opts(const unsigned int clk,
		    struct memctl_opt *popts,
		    struct ddr_conf *conf,
		    struct dimm_params *pdimm,
		    const int dimm_slot_per_ctrl,
		    const unsigned int ip_rev)
{
	popts->rdimm = pdimm->rdimm;
	popts->mirrored_dimm = pdimm->mirrored_dimm;
#ifdef CONFIG_DDR_ECC_EN
	popts->ecc_mode = (pdimm->edc_config == 0x02) ? 1 : 0;
#endif
	popts->ctlr_init_ecc = popts->ecc_mode;
	debug("ctlr_init_ecc %d\n", popts->ctlr_init_ecc);
	popts->self_refresh_in_sleep = 1;
	popts->dynamic_power = 0;

	/*
	 * Check SDRAM width, allow platform override.
	 * 0 = 64-bit, 1 = 32-bit, 2 = 16-bit
	 */
	if (pdimm->primary_sdram_width == 64) {
		popts->data_bus_dimm = DDR_DBUS_64;
		popts->otf_burst_chop_en = 1;
	} else if (pdimm->primary_sdram_width == 32) {
		popts->data_bus_dimm = DDR_DBUS_32;
		popts->otf_burst_chop_en = 0;
	} else if (pdimm->primary_sdram_width == 16) {
		popts->data_bus_dimm = DDR_DBUS_16;
		popts->otf_burst_chop_en = 0;
	} else {
		ERROR("Invalid primary SDRAM width!\n");
		return -EINVAL;
	}
	popts->data_bus_used = popts->data_bus_dimm;
	popts->x4_en = (pdimm->device_width == 4) ? 1 : 0;
	debug("x4_en %d\n", popts->x4_en);

	/*
	 * Address parity: enabled for RDIMM, disabled by default for
	 * DDR4 UDIMM/discrete memory.
	 */
	if (popts->rdimm != 0) {
		popts->ap_en = 1;	/* 0 = disable, 1 = enable */
	} else {
		popts->ap_en = 0;
	}

	if (ip_rev == 0x50500) {
		popts->ap_en = 0;
	}

	debug("ap_en %d\n", popts->ap_en);

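	/*
	 * Worked example with hypothetical numbers: a 7800000 ps
	 * (7.8 us) refresh interval at a 1200 MHz memory clock gives
	 * picos_to_mclk() = 9360 clocks, so bstopre below is 2340.
	 */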
	/* BSTTOPRE precharge interval uses 1/4 of refint value. */
	popts->bstopre = picos_to_mclk(clk, pdimm->refresh_rate_ps) >> 2;
	popts->tfaw_ps = pdimm->tfaw_ps;

	return 0;
}

static void cal_intlv(const int num_ctlrs,
		      struct memctl_opt *popts,
		      struct ddr_conf *conf,
		      struct dimm_params *pdimm)
{
#ifdef NXP_DDR_INTLV_256B
	if (num_ctlrs == 2) {
		popts->ctlr_intlv = 1;
		popts->ctlr_intlv_mode = DDR_256B_INTLV;
	}
#endif
	debug("ctlr_intlv %d\n", popts->ctlr_intlv);
	debug("ctlr_intlv_mode %d\n", popts->ctlr_intlv_mode);

	popts->ba_intlv = auto_bank_intlv(conf->cs_in_use, pdimm);
	debug("ba_intlv 0x%x\n", popts->ba_intlv);
}

static int update_burst_length(struct memctl_opt *popts)
{
	/* Choose burst length. */
	if ((popts->data_bus_used == DDR_DBUS_32) ||
	    (popts->data_bus_used == DDR_DBUS_16)) {
		/* 32-bit or 16-bit bus */
		popts->otf_burst_chop_en = 0;
		popts->burst_length = DDR_BL8;
	} else if (popts->otf_burst_chop_en != 0) {	/* on-the-fly burst chop */
		popts->burst_length = DDR_OTF;	/* on-the-fly BC4 and BL8 */
	} else {
		popts->burst_length = DDR_BL8;
	}
	debug("data_bus_used %d\n", popts->data_bus_used);
	debug("otf_burst_chop_en %d\n", popts->otf_burst_chop_en);
	debug("burst_length 0x%x\n", popts->burst_length);
	/*
	 * If a reduced data width is requested, but the SPD
	 * specifies a physically wider device, adjust the
	 * computed DIMM capacities accordingly before
	 * assigning addresses.
	 * 0 = 64-bit, 1 = 32-bit, 2 = 16-bit
	 */
	if (popts->data_bus_dimm > popts->data_bus_used) {
		ERROR("Data bus configuration error\n");
		return -EINVAL;
	}
	popts->dbw_cap_shift = popts->data_bus_used - popts->data_bus_dimm;
	debug("dbw_cap_shift %d\n", popts->dbw_cap_shift);

	return 0;
}
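
/*
 * For illustration: running a 64-bit DIMM (data_bus_dimm = 0) on a
 * 32-bit bus (data_bus_used = 1) gives dbw_cap_shift = 1, which halves
 * the usable rank density (see rank_density >> dbw_cap_shift in the
 * address-assignment helpers below).
 */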

int cal_board_params(struct ddr_info *priv,
		     const struct board_timing *dimm,
		     int len)
{
	const unsigned long speed = priv->clk / 1000000;
	const struct dimm_params *pdimm = &priv->dimm;
	struct memctl_opt *popts = &priv->opt;
	struct rc_timing const *prt = NULL;
	struct rc_timing const *chosen = NULL;
	int i;

	for (i = 0; i < len; i++) {
		if (pdimm->rc == dimm[i].rc) {
			prt = dimm[i].p;
			break;
		}
	}
	if (prt == NULL) {
		ERROR("No matching board parameters.\n");
		return -EINVAL;
	}
	while (prt->speed_bin != 0) {
		if (speed <= prt->speed_bin) {
			chosen = prt;
			break;
		}
		prt++;
	}
	if (chosen == NULL) {
		ERROR("No timing match for speed %lu MHz\n", speed);
		return -EINVAL;
	}
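
	/*
	 * Multiplying the byte-wide write-leveling start value by
	 * 0x01010101 replicates it into all four bytes of the 32-bit
	 * control words; add1/add2 (board-specific) then adjust
	 * individual byte lanes.
	 */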
	popts->clk_adj = chosen->clk_adj;
	popts->wrlvl_start = chosen->wrlvl;
	popts->wrlvl_ctl_2 = (chosen->wrlvl * 0x01010101 + dimm[i].add1) &
			     0xFFFFFFFF;
	popts->wrlvl_ctl_3 = (chosen->wrlvl * 0x01010101 + dimm[i].add2) &
			     0xFFFFFFFF;

	return 0;
}

static int synthesize_ctlr(struct ddr_info *priv)
{
	int ret;

	ret = cal_odt(priv->clk,
		      &priv->opt,
		      &priv->conf,
		      &priv->dimm,
		      priv->dimm_on_ctlr);
	if (ret != 0) {
		return ret;
	}

	ret = cal_opts(priv->clk,
		       &priv->opt,
		       &priv->conf,
		       &priv->dimm,
		       priv->dimm_on_ctlr,
		       priv->ip_rev);
	if (ret != 0) {
		return ret;
	}

	cal_intlv(priv->num_ctlrs, &priv->opt, &priv->conf, &priv->dimm);
	ret = ddr_board_options(priv);
	if (ret != 0) {
		ERROR("Failed matching board timing.\n");
	}

	ret = update_burst_length(&priv->opt);

	return ret;
}

/* Return the bit mask of valid DIMMs found */
static int parse_spd(struct ddr_info *priv)
{
	struct ddr_conf *conf = &priv->conf;
	struct dimm_params *dimm = &priv->dimm;
	int j, valid_mask = 0;

#ifdef CONFIG_DDR_NODIMM
	valid_mask = ddr_get_ddr_params(dimm, conf);
	if (valid_mask < 0) {
		ERROR("DDR params error\n");
		return valid_mask;
	}
#else
	const int *spd_addr = priv->spd_addr;
	const int num_ctlrs = priv->num_ctlrs;
	const int num_dimm = priv->dimm_on_ctlr;
	struct ddr4_spd spd[2];
	unsigned int spd_checksum[2];
	int addr_idx = 0;
	int spd_idx = 0;
	int ret, addr, i;

	/* Scan all DIMMs */
	for (i = 0; i < num_ctlrs; i++) {
		debug("Controller %d\n", i);
		for (j = 0; j < num_dimm; j++, addr_idx++) {
			debug("DIMM %d\n", j);
			addr = spd_addr[addr_idx];
			if (addr == 0) {
				if (j == 0) {
					ERROR("First SPD addr wrong.\n");
					return -EINVAL;
				}
				continue;
			}
			debug("addr 0x%x\n", addr);
			ret = read_spd(addr, &spd[spd_idx],
				       sizeof(struct ddr4_spd));
			if (ret != 0) {	/* invalid */
				debug("Invalid SPD at address 0x%x\n", addr);
				continue;
			}

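			/*
			 * Fold both SPD CRC words into one value; every
			 * populated DIMM must then produce the same
			 * checksum (identical modules), or we bail out
			 * below.
			 */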
			spd_checksum[spd_idx] =
				(spd[spd_idx].crc[1] << 24) |
				(spd[spd_idx].crc[0] << 16) |
				(spd[spd_idx].mod_section.uc[127] << 8) |
				(spd[spd_idx].mod_section.uc[126] << 0);
			debug("checksum 0x%x\n", spd_checksum[spd_idx]);
			if (spd_checksum[spd_idx] == 0) {
				debug("Bad checksum, ignored.\n");
				continue;
			}
			if (spd_idx == 0) {
				/* first valid SPD */
				ret = cal_dimm_params(&spd[0], dimm);
				if (ret != 0) {
					ERROR("SPD calculation error\n");
					return -EINVAL;
				}
			}

			if (spd_idx != 0 && spd_checksum[0] !=
			    spd_checksum[spd_idx]) {
				ERROR("Not identical DIMMs.\n");
				return -EINVAL;
			}
			conf->dimm_in_use[j] = 1;
			valid_mask |= 1 << addr_idx;
			spd_idx = 1;
		}
		debug("done with controller %d\n", i);
	}
	switch (num_ctlrs) {
	case 1:
		if ((valid_mask & 0x1) == 0) {
			ERROR("First slot cannot be empty.\n");
			return -EINVAL;
		}
		break;
	case 2:
		switch (num_dimm) {
		case 1:
			if (valid_mask == 0) {
				ERROR("Both slots empty\n");
				return -EINVAL;
			}
			break;
		case 2:
			if (valid_mask != 0x5 &&
			    valid_mask != 0xf &&
			    (valid_mask & 0x7) != 0x4 &&
			    (valid_mask & 0xd) != 0x1) {
				ERROR("Invalid DIMM combination.\n");
				return -EINVAL;
			}
			break;
		default:
			ERROR("Invalid number of DIMMs.\n");
			return -EINVAL;
		}
		break;
	default:
		ERROR("Invalid number of controllers.\n");
		return -EINVAL;
	}
	/* Now we have valid and identical DIMMs on the controllers. */
#endif /* CONFIG_DDR_NODIMM */

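	/*
	 * Map ranks to chip selects: each slot owns CONFIG_CS_PER_SLOT
	 * consecutive CS lines, so a dual-rank DIMM in slot j claims
	 * 0x3 << (j * CONFIG_CS_PER_SLOT) and a single-rank DIMM
	 * 0x1 << (j * CONFIG_CS_PER_SLOT) in cs_in_use.
	 */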
	debug("cal cs\n");
	conf->cs_in_use = 0;
	for (j = 0; j < DDRC_NUM_DIMM; j++) {
		if (conf->dimm_in_use[j] == 0) {
			continue;
		}
		switch (dimm->n_ranks) {
		case 4:
			ERROR("Quad-rank DIMM not supported\n");
			return -EINVAL;
		case 2:
			conf->cs_on_dimm[j] = 0x3 << (j * CONFIG_CS_PER_SLOT);
			conf->cs_in_use |= conf->cs_on_dimm[j];
			break;
		case 1:
			conf->cs_on_dimm[j] = 0x1 << (j * CONFIG_CS_PER_SLOT);
			conf->cs_in_use |= conf->cs_on_dimm[j];
			break;
		default:
			ERROR("SPD error with n_ranks\n");
			return -EINVAL;
		}
		debug("cs_in_use = %x\n", conf->cs_in_use);
		debug("cs_on_dimm[%d] = %x\n", j, conf->cs_on_dimm[j]);
	}
#ifndef CONFIG_DDR_NODIMM
	if (priv->dimm.rdimm != 0) {
		NOTICE("RDIMM %s\n", priv->dimm.mpart);
	} else {
		NOTICE("UDIMM %s\n", priv->dimm.mpart);
	}
#else
	NOTICE("%s\n", priv->dimm.mpart);
#endif

	return valid_mask;
}

static unsigned long long assign_intlv_addr(
	const struct dimm_params *pdimm,
	const struct memctl_opt *opt,
	struct ddr_conf *conf,
	const unsigned long long current_mem_base)
{
	int i;
	int ctlr_density_mul = 0;
	const unsigned long long rank_density = pdimm->rank_density >>
						opt->dbw_cap_shift;
	unsigned long long total_ctlr_mem;

	debug("rank density 0x%llx\n", rank_density);
	switch (opt->ba_intlv & DDR_BA_INTLV_CS0123) {
	case DDR_BA_INTLV_CS0123:
		ctlr_density_mul = 4;
		break;
	case DDR_BA_INTLV_CS01:
		ctlr_density_mul = 2;
		break;
	default:
		ctlr_density_mul = 1;
		break;
	}
	debug("ctlr density mul %d\n", ctlr_density_mul);
	switch (opt->ctlr_intlv_mode) {
	case DDR_256B_INTLV:
		total_ctlr_mem = 2 * ctlr_density_mul * rank_density;
		break;
	default:
		ERROR("Unknown interleaving mode\n");
		return 0;
	}
	conf->base_addr = current_mem_base;
	conf->total_mem = total_ctlr_mem;

	/* Overwrite cs_in_use bitmask with controller interleaving. */
	conf->cs_in_use = (1 << ctlr_density_mul) - 1;
	debug("Overwrite cs_in_use as %x\n", conf->cs_in_use);

	/* Fill addr with each cs in use */
	for (i = 0; i < ctlr_density_mul; i++) {
		conf->cs_base_addr[i] = current_mem_base;
		conf->cs_size[i] = total_ctlr_mem;
		debug("CS %d\n", i);
		debug("    base_addr 0x%llx\n", conf->cs_base_addr[i]);
		debug("    size 0x%llx\n", conf->cs_size[i]);
	}

	return total_ctlr_mem;
}
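
/*
 * Capacity example with made-up numbers: two 256B-interleaved
 * controllers, each holding one dual-rank DIMM (ctlr_density_mul = 2)
 * with 8 GB per rank, give total_ctlr_mem = 2 * 2 * 8 GB = 32 GB.
 */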

static unsigned long long assign_non_intlv_addr(
	const struct dimm_params *pdimm,
	const struct memctl_opt *opt,
	struct ddr_conf *conf,
	unsigned long long current_mem_base)
{
	int i;
	const unsigned long long rank_density = pdimm->rank_density >>
						opt->dbw_cap_shift;
	unsigned long long total_ctlr_mem = 0ULL;

	debug("rank density 0x%llx\n", rank_density);
	conf->base_addr = current_mem_base;

	/* assign each cs */
	switch (opt->ba_intlv & DDR_BA_INTLV_CS0123) {
	case DDR_BA_INTLV_CS0123:
		for (i = 0; i < DDRC_NUM_CS; i++) {
			conf->cs_base_addr[i] = current_mem_base;
			conf->cs_size[i] = rank_density << 2;
			total_ctlr_mem += rank_density;
		}
		break;
	case DDR_BA_INTLV_CS01:
		for (i = 0; ((conf->cs_in_use & (1 << i)) != 0) && i < 2; i++) {
			conf->cs_base_addr[i] = current_mem_base;
			conf->cs_size[i] = rank_density << 1;
			total_ctlr_mem += rank_density;
		}
		current_mem_base += total_ctlr_mem;
		for (; ((conf->cs_in_use & (1 << i)) != 0) && i < DDRC_NUM_CS;
		     i++) {
			conf->cs_base_addr[i] = current_mem_base;
			conf->cs_size[i] = rank_density;
			total_ctlr_mem += rank_density;
			current_mem_base += rank_density;
		}
		break;
	case DDR_BA_NONE:
		for (i = 0; ((conf->cs_in_use & (1 << i)) != 0) &&
			    (i < DDRC_NUM_CS); i++) {
			conf->cs_base_addr[i] = current_mem_base;
			conf->cs_size[i] = rank_density;
			current_mem_base += rank_density;
			total_ctlr_mem += rank_density;
		}
		break;
	default:
		ERROR("Unsupported bank interleaving\n");
		return 0;
	}
	for (i = 0; ((conf->cs_in_use & (1 << i)) != 0) &&
		    (i < DDRC_NUM_CS); i++) {
		debug("CS %d\n", i);
		debug("    base_addr 0x%llx\n", conf->cs_base_addr[i]);
		debug("    size 0x%llx\n", conf->cs_size[i]);
	}

	return total_ctlr_mem;
}

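/*
 * Declared weak so platform code can override the generic address
 * assignment (e.g. with a fixed memory map) without modifying this
 * driver.
 */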
unsigned long long assign_addresses(struct ddr_info *priv)
		__attribute__ ((weak));

unsigned long long assign_addresses(struct ddr_info *priv)
{
	struct memctl_opt *opt = &priv->opt;
	const struct dimm_params *dimm = &priv->dimm;
	struct ddr_conf *conf = &priv->conf;
	unsigned long long current_mem_base = priv->mem_base;
	unsigned long long total_mem;

	total_mem = 0ULL;
	debug("ctlr_intlv %d\n", opt->ctlr_intlv);
	if (opt->ctlr_intlv != 0) {
		total_mem = assign_intlv_addr(dimm, opt, conf,
					      current_mem_base);
	} else {
		/*
		 * Simple linear assignment if memory controllers are not
		 * interleaved. This is only valid for SoCs with a single
		 * DDRC.
		 */
		total_mem = assign_non_intlv_addr(dimm, opt, conf,
						  current_mem_base);
	}
	conf->total_mem = total_mem;
	debug("base 0x%llx\n", current_mem_base);
	debug("Total mem by assignment is 0x%llx\n", total_mem);

	return total_mem;
}

static int cal_ddrc_regs(struct ddr_info *priv)
{
	int ret;

	ret = compute_ddrc(priv->clk,
			   &priv->opt,
			   &priv->conf,
			   &priv->ddr_reg,
			   &priv->dimm,
			   priv->ip_rev);
	if (ret != 0) {
		ERROR("Calculating DDR registers failed\n");
	}

	return ret;
}

#endif /* CONFIG_STATIC_DDR */

static int write_ddrc_regs(struct ddr_info *priv)
{
	int i;
	int ret;

	for (i = 0; i < priv->num_ctlrs; i++) {
		ret = ddrc_set_regs(priv->clk, &priv->ddr_reg, priv->ddr[i], 0);
		if (ret != 0) {
			ERROR("Writing DDR register(s) failed\n");
			return ret;
		}
	}

	return 0;
}

long long dram_init(struct ddr_info *priv
#if defined(NXP_HAS_CCN504) || defined(NXP_HAS_CCN508)
		    , uintptr_t nxp_ccn_hn_f0_addr
#endif
		    )
{
	uint64_t time __unused;
	long long dram_size;
	int ret;
	const uint64_t time_base = get_timer_val(0);
	unsigned int ip_rev = get_ddrc_version(priv->ddr[0]);

	int valid_spd_mask __unused;
	int scratch = 0x0;

	priv->ip_rev = ip_rev;

#ifndef CONFIG_STATIC_DDR
	INFO("time base %llu ms\n", time_base);
	debug("Parse DIMM SPD(s)\n");
	valid_spd_mask = parse_spd(priv);

	if (valid_spd_mask < 0) {
		ERROR("Parsing DIMM error\n");
		return valid_spd_mask;
	}

#if defined(NXP_HAS_CCN504) || defined(NXP_HAS_CCN508)
	if (priv->num_ctlrs == 2 || priv->num_ctlrs == 1) {
		ret = disable_unused_ddrc(priv, valid_spd_mask,
					  nxp_ccn_hn_f0_addr);
		if (ret != 0) {
			return ret;
		}
	}
#endif

	time = get_timer_val(time_base);
	INFO("Time after parsing SPD %llu ms\n", time);
	debug("Synthesize configurations\n");
	ret = synthesize_ctlr(priv);
	if (ret != 0) {
		ERROR("Synthesize config error\n");
		return ret;
	}

	debug("Assign binding addresses\n");
	dram_size = assign_addresses(priv);
	if (dram_size == 0) {
		ERROR("Assigning address error\n");
		return -EINVAL;
	}

	debug("Calculate controller registers\n");
	ret = cal_ddrc_regs(priv);
	if (ret != 0) {
		ERROR("Calculate register error\n");
		return ret;
	}

	ret = compute_ddr_phy(priv);
	if (ret != 0) {
		ERROR("Calculating DDR PHY registers failed.\n");
	}

#else
	dram_size = board_static_ddr(priv);
	if (dram_size == 0) {
		ERROR("Error getting static DDR settings.\n");
		return -EINVAL;
	}
#endif

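	/*
	 * On warm boot, clear SDRAM_CFG2_D_INIT so the controller does
	 * not re-run data initialization, preserving DRAM contents
	 * across the reset.
	 */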
	if (priv->warm_boot_flag == DDR_WARM_BOOT) {
		scratch = (priv->ddr_reg).sdram_cfg[1];
		scratch = scratch & ~(SDRAM_CFG2_D_INIT);
		priv->ddr_reg.sdram_cfg[1] = scratch;
	}

	time = get_timer_val(time_base);
	INFO("Time before programming controller %llu ms\n", time);
	debug("Program controller registers\n");
	ret = write_ddrc_regs(priv);
	if (ret != 0) {
		ERROR("Programming DDRC error\n");
		return ret;
	}

	puts("");
	NOTICE("%lld GB ", dram_size >> 30);
	print_ddr_info(priv->ddr[0]);

	time = get_timer_val(time_base);
	INFO("Time used by DDR driver %llu ms\n", time);

	return dram_size;
}