/*
 * Copyright 2021 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */

#include <errno.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#include <common/debug.h>
#include <ddr.h>
#include <drivers/delay_timer.h>
#include <immap.h>

#define BIST_CR		0x80060000	/* written to mtcr to start the test */
#define BIST_CR_EN	0x80000000	/* test enable/running bit */
#define BIST_CR_STAT	0x00000001	/* test failure status bit */
#define CTLR_INTLV_MASK	0x20000000	/* controller interleaving enable in CS0_CONFIG */

#pragma weak run_bist

bool run_bist(void)
{
#ifdef BIST_EN
	return true;
#else
	return false;
#endif
}
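
/*
 * The weak run_bist() above only reflects the BIST_EN build option. A
 * platform may override it to decide at run time whether the self test
 * should run. A minimal sketch (plat_is_warm_boot() is a hypothetical
 * platform hook, not part of this driver):
 *
 *	bool run_bist(void)
 *	{
 *		return !plat_is_warm_boot();
 *	}
 */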

/*
 * Perform a built-in self test (BIST) on memory.
 * The timeout value is in units of 10 ms.
 */
int bist(const struct ccsr_ddr *ddr, int timeout)
{
	const unsigned int test_pattern[10] = {
		0xffffffff,
		0x00000000,
		0xaaaaaaaa,
		0x55555555,
		0xcccccccc,
		0x33333333,
		0x12345678,
		0xabcdef01,
		0xaa55aa55,
		0x55aa55aa
	};
	unsigned int mtcr, err_detect, err_sbe;
	unsigned int cs0_config;
	unsigned int csn_bnds[4];
	int ret = 0;
	uint32_t i;
#ifdef CONFIG_DDR_ADDR_DEC
	uint32_t dec_9 = ddr_in32(&ddr->dec[9]);
	uint32_t pos = 0U;
	uint32_t map_save = 0U;
	uint32_t temp32 = 0U;
	uint32_t map, shift, highest;
#endif

	cs0_config = ddr_in32(&ddr->csn_cfg[0]);
	if ((cs0_config & CTLR_INTLV_MASK) != 0U) {
		/* set bnds to non-interleaving */
		for (i = 0U; i < 4U; i++) {
			csn_bnds[i] = ddr_in32(&ddr->bnds[i].a);
			ddr_out32(&ddr->bnds[i].a,
				  (csn_bnds[i] & U(0xfffefffe)) >> 1U);
		}
		ddr_out32(&ddr->csn_cfg[0], cs0_config & ~CTLR_INTLV_MASK);
#ifdef CONFIG_DDR_ADDR_DEC
		if ((dec_9 & 0x1U) != 0U) {
			highest = (dec_9 >> 26U) == U(0x3F) ? 0U : dec_9 >> 26U;
			pos = 37U;
			for (i = 0U; i < 36U; i++) { /* Go through all 37 */
				if ((i % 4U) == 0U) {
					temp32 = ddr_in32(&ddr->dec[i >> 2U]);
				}
				shift = (3U - i % 4U) * 8U + 2U;
				map = (temp32 >> shift) & U(0x3F);
				if (map > highest && map != U(0x3F)) {
					highest = map;
					pos = i;
				}
			}
			debug("\nFound highest position %d, mapping to %d, ",
			      pos, highest);
			map_save = ddr_in32(&ddr->dec[pos >> 2]);
			shift = (3U - pos % 4U) * 8U + 2U;
			debug("in dec[%d], bit %d (0x%x)\n",
			      pos >> 2U, shift, map_save);
			temp32 = map_save & ~(U(0x3F) << shift);
			temp32 |= 8U << shift;
			ddr_out32(&ddr->dec[pos >> 2U], temp32);
			timeout <<= 2U;
			debug("Increase wait time to %d ms\n", timeout * 10);
		}
#endif
	}
	for (i = 0U; i < 10U; i++) {
		ddr_out32(&ddr->mtp[i], test_pattern[i]);
	}
	mtcr = BIST_CR;
	ddr_out32(&ddr->mtcr, mtcr);
	do {
		mdelay(10);
		mtcr = ddr_in32(&ddr->mtcr);
	} while (timeout-- > 0 && ((mtcr & BIST_CR_EN) != 0));
	if (timeout <= 0) {
		ERROR("Timeout\n");
	} else {
		debug("Timer remains %d\n", timeout);
	}

	err_detect = ddr_in32(&ddr->err_detect);
	err_sbe = ddr_in32(&ddr->err_sbe);
	if (err_detect != 0U || ((err_sbe & U(0xffff)) != 0U)) {
		ERROR("ECC error detected\n");
		ret = -EIO;
	}

	if ((cs0_config & CTLR_INTLV_MASK) != 0) {
		for (i = 0U; i < 4U; i++) {
			ddr_out32(&ddr->bnds[i].a, csn_bnds[i]);
		}
		ddr_out32(&ddr->csn_cfg[0], cs0_config);
#ifdef CONFIG_DDR_ADDR_DEC
		if ((dec_9 & U(0x1)) != 0U) {
			ddr_out32(&ddr->dec[pos >> 2], map_save);
		}
#endif
	}
	if ((mtcr & BIST_CR_STAT) != 0) {
		ERROR("Built-in self test failed\n");
		ret = -EIO;
	} else {
		NOTICE("Built-in self test passed\n");
	}

	return ret;
}
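
/*
 * Typical use (a sketch; this mirrors the BIST invocation in ddrc_set_regs()
 * below, where the estimated scrub time is multiplied by ten so the test can
 * cover the whole memory):
 *
 *	timeout = ((total_mem_per_ctrl << (6 - bus_width)) *
 *		   100 / (clk >> 20)) * 10;
 *	ret = bist(ddr, timeout);
 */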

void dump_ddrc(unsigned int *ddr)
{
#ifdef DDR_DEBUG
	uint32_t i;
	unsigned long val;

	for (i = 0U; i < U(0x400); i++, ddr++) {
		val = ddr_in32(ddr);
		if (val != 0U) { /* skip zeros */
			debug("*0x%lx = 0x%lx\n", (unsigned long)ddr, val);
		}
	}
#endif
}

#ifdef ERRATA_DDR_A009803
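/*
 * Write @value to the register at @ptr, then poll until all bits in @bits
 * have cleared, waiting up to roughly 100 ms (1000 polls of 100 us). Used
 * below by part 2 of the erratum A009803 workaround.
 */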
static void set_wait_for_bits_clear(const void *ptr,
				    unsigned int value,
				    unsigned int bits)
{
	int timeout = 1000;

	ddr_out32(ptr, value);
	do {
		udelay(100);
	} while (timeout-- > 0 && ((ddr_in32(ptr) & bits) != 0));

	if (timeout <= 0) {
		ERROR("wait for clear timeout.\n");
	}
}
#endif

#if (DDRC_NUM_CS > 4)
#error Invalid setting for DDRC_NUM_CS
#endif

/*
 * If supported by the platform, the DDR controller is programmed in two
 * passes so that DDR reset can be deasserted in between, as required by
 * the JEDEC specification for RDIMMs.
 */
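/*
 * Illustrative call sequence (a sketch only; the reset hook named here is a
 * hypothetical platform function, not part of this driver):
 *
 *	ddrc_set_regs(clk, regs, ddr, 1);	// pass 1: start the DRAM clocks
 *	plat_deassert_ddr_reset();		// board-specific reset control
 *	ddrc_set_regs(clk, regs, ddr, 2);	// pass 2: resume after reset
 *
 * A single-pass flow simply calls ddrc_set_regs(clk, regs, ddr, 0).
 */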
int ddrc_set_regs(const unsigned long clk,
		  const struct ddr_cfg_regs *regs,
		  const struct ccsr_ddr *ddr,
		  int twopass)
{
	unsigned int i, bus_width;
	unsigned int temp_sdram_cfg;
	unsigned int total_mem_per_ctrl, total_mem_per_ctrl_adj;
	const int mod_bnds = regs->cs[0].config & CTLR_INTLV_MASK;
	int timeout;
	int ret = 0;
#if defined(ERRATA_DDR_A009942) || defined(ERRATA_DDR_A010165)
	unsigned long ddr_freq;
	unsigned int tmp;
#ifdef ERRATA_DDR_A009942
	unsigned int check;
	unsigned int cpo_min = U(0xff);
	unsigned int cpo_max = 0U;
#endif
#endif

	if (twopass == 2U) {
		goto after_reset;
	}

	/* Set cdr1 first in case 0.9V VDD is enabled for some SoCs */
	ddr_out32(&ddr->ddr_cdr1, regs->cdr[0]);

	ddr_out32(&ddr->sdram_clk_cntl, regs->clk_cntl);

	for (i = 0U; i < DDRC_NUM_CS; i++) {
		if (mod_bnds != 0U) {
			ddr_out32(&ddr->bnds[i].a,
				  (regs->cs[i].bnds & U(0xfffefffe)) >> 1U);
		} else {
			ddr_out32(&ddr->bnds[i].a, regs->cs[i].bnds);
		}
		ddr_out32(&ddr->csn_cfg_2[i], regs->cs[i].config_2);
	}

	ddr_out32(&ddr->timing_cfg_0, regs->timing_cfg[0]);
	ddr_out32(&ddr->timing_cfg_1, regs->timing_cfg[1]);
	ddr_out32(&ddr->timing_cfg_2, regs->timing_cfg[2]);
	ddr_out32(&ddr->timing_cfg_3, regs->timing_cfg[3]);
	ddr_out32(&ddr->timing_cfg_4, regs->timing_cfg[4]);
	ddr_out32(&ddr->timing_cfg_5, regs->timing_cfg[5]);
	ddr_out32(&ddr->timing_cfg_6, regs->timing_cfg[6]);
	ddr_out32(&ddr->timing_cfg_7, regs->timing_cfg[7]);
	ddr_out32(&ddr->timing_cfg_8, regs->timing_cfg[8]);
	ddr_out32(&ddr->timing_cfg_9, regs->timing_cfg[9]);
	ddr_out32(&ddr->zq_cntl, regs->zq_cntl);
	for (i = 0U; i < 4U; i++) {
		ddr_out32(&ddr->dq_map[i], regs->dq_map[i]);
	}
	ddr_out32(&ddr->sdram_cfg_3, regs->sdram_cfg[2]);
	ddr_out32(&ddr->sdram_mode, regs->sdram_mode[0]);
	ddr_out32(&ddr->sdram_mode_2, regs->sdram_mode[1]);
	ddr_out32(&ddr->sdram_mode_3, regs->sdram_mode[2]);
	ddr_out32(&ddr->sdram_mode_4, regs->sdram_mode[3]);
	ddr_out32(&ddr->sdram_mode_5, regs->sdram_mode[4]);
	ddr_out32(&ddr->sdram_mode_6, regs->sdram_mode[5]);
	ddr_out32(&ddr->sdram_mode_7, regs->sdram_mode[6]);
	ddr_out32(&ddr->sdram_mode_8, regs->sdram_mode[7]);
	ddr_out32(&ddr->sdram_mode_9, regs->sdram_mode[8]);
	ddr_out32(&ddr->sdram_mode_10, regs->sdram_mode[9]);
	ddr_out32(&ddr->sdram_mode_11, regs->sdram_mode[10]);
	ddr_out32(&ddr->sdram_mode_12, regs->sdram_mode[11]);
	ddr_out32(&ddr->sdram_mode_13, regs->sdram_mode[12]);
	ddr_out32(&ddr->sdram_mode_14, regs->sdram_mode[13]);
	ddr_out32(&ddr->sdram_mode_15, regs->sdram_mode[14]);
	ddr_out32(&ddr->sdram_mode_16, regs->sdram_mode[15]);
	ddr_out32(&ddr->sdram_md_cntl, regs->md_cntl);
#ifdef ERRATA_DDR_A009663
	ddr_out32(&ddr->sdram_interval,
		  regs->interval & ~SDRAM_INTERVAL_BSTOPRE);
#else
	ddr_out32(&ddr->sdram_interval, regs->interval);
#endif
	ddr_out32(&ddr->sdram_data_init, regs->data_init);
	if (regs->eor != 0) {
		ddr_out32(&ddr->eor, regs->eor);
	}

	ddr_out32(&ddr->wrlvl_cntl, regs->wrlvl_cntl[0]);
#ifndef NXP_DDR_EMU
	/*
	 * Skip these two registers when running on the emulator,
	 * because the emulator has no skew between bytes.
	 */

	if (regs->wrlvl_cntl[1] != 0) {
		ddr_out32(&ddr->ddr_wrlvl_cntl_2, regs->wrlvl_cntl[1]);
	}
	if (regs->wrlvl_cntl[2] != 0) {
		ddr_out32(&ddr->ddr_wrlvl_cntl_3, regs->wrlvl_cntl[2]);
	}
#endif

	ddr_out32(&ddr->ddr_sr_cntr, regs->ddr_sr_cntr);
	ddr_out32(&ddr->ddr_sdram_rcw_1, regs->sdram_rcw[0]);
	ddr_out32(&ddr->ddr_sdram_rcw_2, regs->sdram_rcw[1]);
	ddr_out32(&ddr->ddr_sdram_rcw_3, regs->sdram_rcw[2]);
	ddr_out32(&ddr->ddr_sdram_rcw_4, regs->sdram_rcw[3]);
	ddr_out32(&ddr->ddr_sdram_rcw_5, regs->sdram_rcw[4]);
	ddr_out32(&ddr->ddr_sdram_rcw_6, regs->sdram_rcw[5]);
	ddr_out32(&ddr->ddr_cdr2, regs->cdr[1]);
	ddr_out32(&ddr->sdram_cfg_2, regs->sdram_cfg[1]);
	ddr_out32(&ddr->init_addr, regs->init_addr);
	ddr_out32(&ddr->init_ext_addr, regs->init_ext_addr);

#ifdef ERRATA_DDR_A009803
	/* part 1 of 2 */
	if ((regs->sdram_cfg[1] & SDRAM_CFG2_AP_EN) != 0) {
		if ((regs->sdram_cfg[0] & SDRAM_CFG_RD_EN) != 0) {
			ddr_out32(&ddr->ddr_sdram_rcw_2,
				  regs->sdram_rcw[1] & ~0xf0);
		}

		ddr_out32(&ddr->err_disable,
			  regs->err_disable | DDR_ERR_DISABLE_APED);
	}
#else
	ddr_out32(&ddr->err_disable, regs->err_disable);
#endif
	ddr_out32(&ddr->err_int_en, regs->err_int_en);

	/* For DDRC 5.05 only */
	if (get_ddrc_version(ddr) == 0x50500) {
		ddr_out32(&ddr->tx_cfg[1], 0x1f1f1f1f);
		ddr_out32(&ddr->debug[3], 0x124a02c0);
	}

	for (i = 0U; i < 4U; i++) {
		if (regs->tx_cfg[i] != 0) {
			ddr_out32(&ddr->tx_cfg[i], regs->tx_cfg[i]);
		}
	}
	for (i = 0U; i < 64U; i++) {
		if (regs->debug[i] != 0) {
#ifdef ERRATA_DDR_A009942
			if (i == 28U) {
				continue;
			}
#endif
			ddr_out32(&ddr->debug[i], regs->debug[i]);
		}
	}
#ifdef CONFIG_DDR_ADDR_DEC
	if ((regs->dec[9] & 1) != 0U) {
		for (i = 0U; i < 10U; i++) {
			ddr_out32(&ddr->dec[i], regs->dec[i]);
		}
		if (mod_bnds != 0) {
			debug("Disable address decoding\n");
			ddr_out32(&ddr->dec[9], 0);
		}
	}
#endif

#ifdef ERRATA_DDR_A008511
	/* Part 1 of 2 */
	/* This erratum only applies to version 5.2.1 */
	if (get_ddrc_version(ddr) == 0x50200) {
		ERROR("Unsupported SoC.\n");
	} else if (get_ddrc_version(ddr) == 0x50201) {
		ddr_out32(&ddr->debug[37], (U(1) << 31));
		ddr_out32(&ddr->ddr_cdr2,
			  regs->cdr[1] | DDR_CDR2_VREF_TRAIN_EN);
	} else {
		debug("Erratum A008511 doesn't apply.\n");
	}
#endif

#ifdef ERRATA_DDR_A009942
	ddr_freq = clk / 1000000U;
	tmp = ddr_in32(&ddr->debug[28]);
	tmp &= U(0xff0fff00);
	tmp |= ddr_freq <= 1333U ? U(0x0080006a) :
		(ddr_freq <= 1600U ? U(0x0070006f) :
		 (ddr_freq <= 1867U ? U(0x00700076) : U(0x0060007b)));
	if (regs->debug[28] != 0) {
		tmp &= ~0xff;
		tmp |= regs->debug[28] & 0xff;
	} else {
		WARN("Warning: Optimal CPO value not set.\n");
	}
	ddr_out32(&ddr->debug[28], tmp);
#endif

#ifdef ERRATA_DDR_A010165
	ddr_freq = clk / 1000000U;
	if ((ddr_freq > 1900) && (ddr_freq < 2300)) {
		tmp = ddr_in32(&ddr->debug[28]);
		ddr_out32(&ddr->debug[28], tmp | 0x000a0000);
	}
#endif
	/*
	 * For RDIMMs, JEDEC spec requires clocks to be stable before reset is
	 * deasserted. Clocks start when any chip select is enabled and clock
	 * control register is set. Because all DDR components are connected to
	 * one reset signal, this needs to be done in two steps. Step 1 is to
	 * get the clocks started. Step 2 resumes after reset signal is
	 * deasserted.
	 */
	if (twopass == 1) {
		udelay(200);
		return 0;
	}

	/*
	 * Per the updated sequence, the CSn_CONFIG registers must be written
	 * after all other DDR controller registers are set. Then poll for
	 * PHY_INIT_CMPLT = 1, wait at least 100 us, and finally set MEM_EN = 1.
	 */
	for (i = 0U; i < DDRC_NUM_CS; i++) {
		if (mod_bnds != 0U && i == 0U) {
			ddr_out32(&ddr->csn_cfg[i],
				  (regs->cs[i].config & ~CTLR_INTLV_MASK));
		} else {
			ddr_out32(&ddr->csn_cfg[i], regs->cs[i].config);
		}
	}

after_reset:
	/* Set, but do not enable the memory */
	temp_sdram_cfg = regs->sdram_cfg[0];
	temp_sdram_cfg &= ~(SDRAM_CFG_MEM_EN);
	ddr_out32(&ddr->sdram_cfg, temp_sdram_cfg);

	if (get_ddrc_version(ddr) < U(0x50500)) {
		/*
		 * At least 500 us must elapse between the DDR clock setup and
		 * the DDR config enable. The spec requires 200 us for DDR2 and
		 * 500 us for DDR3, so use the larger value in all cases.
		 */
		udelay(500);
		/* apply memory barrier */
		mb();
		isb();
	} else {
		/* wait for PHY complete */
		timeout = 40;
		while (((ddr_in32(&ddr->ddr_dsr2) & 0x4) != 0) &&
		       (timeout > 0)) {
			udelay(500);
			timeout--;
		}
		if (timeout <= 0) {
			printf("PHY handshake timeout, ddr_dsr2 = %x\n",
			       ddr_in32(&ddr->ddr_dsr2));
		} else {
			debug("PHY handshake completed, timer remains %d\n",
			      timeout);
		}
	}

	temp_sdram_cfg = ddr_in32(&ddr->sdram_cfg);
	/* Let the controller go */
	udelay(100);
	ddr_out32(&ddr->sdram_cfg, temp_sdram_cfg | SDRAM_CFG_MEM_EN);

	/* apply memory barrier */
	mb();
	isb();

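	/*
	 * Reconstruct the amount of memory behind each enabled chip select
	 * from the address-field widths encoded in CS_CONFIG together with
	 * the configured bus width, expressed in units of 64 MB (hence the
	 * trailing subtraction of 26 in the exponent below).
	 */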
	total_mem_per_ctrl = 0;
	for (i = 0; i < DDRC_NUM_CS; i++) {
		if ((regs->cs[i].config & 0x80000000) == 0) {
			continue;
		}
		total_mem_per_ctrl += 1 << (
			((regs->cs[i].config >> 14) & 0x3) + 2 +
			((regs->cs[i].config >> 8) & 0x7) + 12 +
			((regs->cs[i].config >> 4) & 0x3) + 0 +
			((regs->cs[i].config >> 0) & 0x7) + 8 +
			((regs->sdram_cfg[2] >> 4) & 0x3) +
			3 - ((regs->sdram_cfg[0] >> 19) & 0x3) -
			26);	/* minus 26 (count of 64M) */
	}
	total_mem_per_ctrl_adj = total_mem_per_ctrl;
	/*
	 * total memory / bus width = transactions needed
	 * transactions needed / data rate = seconds
	 * To add plenty of margin, the estimated time is multiplied by four.
	 * For example, 2GB on a 666MT/s 64-bit bus takes about 402ms to
	 * initialize, so wait up to roughly 1.6 seconds.
	 */
	bus_width = 3 - ((ddr_in32(&ddr->sdram_cfg) & SDRAM_CFG_DBW_MASK)
			 >> SDRAM_CFG_DBW_SHIFT);
	timeout = ((total_mem_per_ctrl_adj << (6 - bus_width)) * 100 /
		   (clk >> 20)) << 2;
	total_mem_per_ctrl_adj >>= 4; /* shift down to GB size */
	if ((ddr_in32(&ddr->sdram_cfg_2) & SDRAM_CFG2_D_INIT) != 0) {
		debug("total size %d GB\n", total_mem_per_ctrl_adj);
		debug("Need to wait up to %d ms\n", timeout * 10);

		do {
			mdelay(10);
		} while (timeout-- > 0 &&
			 ((ddr_in32(&ddr->sdram_cfg_2) & SDRAM_CFG2_D_INIT)) != 0);

		if (timeout <= 0) {
			if (ddr_in32(&ddr->debug[1]) & 0x3d00) {
				ERROR("Found training error(s): 0x%x\n",
				      ddr_in32(&ddr->debug[1]));
			}
			ERROR("Error: Waiting for D_INIT timeout.\n");
			return -EIO;
		}
	}

	if (mod_bnds != 0U) {
		debug("Restore original bnds\n");
		for (i = 0U; i < DDRC_NUM_CS; i++) {
			ddr_out32(&ddr->bnds[i].a, regs->cs[i].bnds);
		}
		ddr_out32(&ddr->csn_cfg[0], regs->cs[0].config);
#ifdef CONFIG_DDR_ADDR_DEC
		if ((regs->dec[9] & U(0x1)) != 0U) {
			debug("Restore address decoding\n");
			ddr_out32(&ddr->dec[9], regs->dec[9]);
		}
#endif
	}

#ifdef ERRATA_DDR_A009803
	/* Part 2 of 2 */
	if ((regs->sdram_cfg[1] & SDRAM_CFG2_AP_EN) != 0) {
		timeout = 400;
		do {
			mdelay(1);
		} while (timeout-- > 0 && ((ddr_in32(&ddr->debug[1]) & 0x2) == 0));

		if ((regs->sdram_cfg[0] & SDRAM_CFG_RD_EN) != 0) {
			for (i = 0U; i < DDRC_NUM_CS; i++) {
				if ((regs->cs[i].config & SDRAM_CS_CONFIG_EN) == 0) {
					continue;
				}
				set_wait_for_bits_clear(&ddr->sdram_md_cntl,
							MD_CNTL_MD_EN |
							MD_CNTL_CS_SEL(i) |
							0x070000ed,
							MD_CNTL_MD_EN);
				udelay(1);
			}
		}

		ddr_out32(&ddr->err_disable,
			  regs->err_disable & ~DDR_ERR_DISABLE_APED);
	}
#endif

#ifdef ERRATA_DDR_A009663
	ddr_out32(&ddr->sdram_interval, regs->interval);
#endif

#ifdef ERRATA_DDR_A009942
	timeout = 400;
	do {
		mdelay(1);
	} while (timeout-- > 0 && ((ddr_in32(&ddr->debug[1]) & 0x2) == 0));
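	/*
	 * Scan the completed CPO values reported in the debug registers (one
	 * register per data lane group, plus one more when ECC is enabled)
	 * and track their minimum and maximum; the recommended cpo_sample
	 * lies roughly midway between the two (plus a fixed offset), as
	 * reported below.
	 */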
	tmp = (regs->sdram_cfg[0] >> 19) & 0x3;
	check = (tmp == DDR_DBUS_64) ? 4 : ((tmp == DDR_DBUS_32) ? 2 : 1);
	for (i = 0; i < check; i++) {
		tmp = ddr_in32(&ddr->debug[9 + i]);
		debug("Reading debug[%d] as 0x%x\n", i + 9, tmp);
		cpo_min = min(cpo_min,
			      min((tmp >> 24) & 0xff, (tmp >> 8) & 0xff));
		cpo_max = max(cpo_max,
			      max((tmp >> 24) & 0xff, (tmp >> 8) & 0xff));
	}
	if ((regs->sdram_cfg[0] & SDRAM_CFG_ECC_EN) != 0) {
		tmp = ddr_in32(&ddr->debug[13]);
		cpo_min = min(cpo_min, (tmp >> 24) & 0xff);
		cpo_max = max(cpo_max, (tmp >> 24) & 0xff);
	}
	debug("cpo_min 0x%x\n", cpo_min);
	debug("cpo_max 0x%x\n", cpo_max);
	tmp = ddr_in32(&ddr->debug[28]);
	debug("debug[28] 0x%x\n", tmp);
	if ((cpo_min + 0x3B) < (tmp & 0xff)) {
		WARN("Warning: A009942 requires setting cpo_sample to 0x%x\n",
		     (cpo_min + cpo_max) / 2 + 0x27);
	} else {
		debug("Optimal cpo_sample 0x%x\n",
		      (cpo_min + cpo_max) / 2 + 0x27);
	}
#endif
	if (run_bist() != 0) {
		if ((ddr_in32(&ddr->debug[1]) &
		     ((get_ddrc_version(ddr) == 0x50500) ? 0x3c00 : 0x3d00)) != 0) {
			ERROR("Found training error(s): 0x%x\n",
			      ddr_in32(&ddr->debug[1]));
			return -EIO;
		}
		INFO("Running built-in self test ...\n");
		/* give it 10x time to cover whole memory */
		timeout = ((total_mem_per_ctrl << (6 - bus_width)) *
			   100 / (clk >> 20)) * 10;
		INFO("\tWait up to %d ms\n", timeout * 10);
		ret = bist(ddr, timeout);
	}
	dump_ddrc((void *)ddr);

	return ret;
}