/*
 * Copyright (C) Marvell International Ltd. and its affiliates
 *
 * SPDX-License-Identifier:	GPL-2.0
 */

#include <common.h>
#include <i2c.h>
#include <spl.h>
#include <asm/io.h>
#include <asm/arch/cpu.h>
#include <asm/arch/soc.h>

#include "ddr3_init.h"

#include "../../../../arch/arm/mach-mvebu/serdes/a38x/sys_env_lib.h"

static struct dlb_config ddr3_dlb_config_table[] = {
	{REG_STATIC_DRAM_DLB_CONTROL, 0x2000005c},
	{DLB_BUS_OPTIMIZATION_WEIGHTS_REG, 0x00880000},
	{DLB_AGING_REGISTER, 0x0f7f007f},
	{DLB_EVICTION_CONTROL_REG, 0x0000129f},
	{DLB_EVICTION_TIMERS_REGISTER_REG, 0x00ff0000},
	{DLB_BUS_WEIGHTS_DIFF_CS, 0x04030802},
	{DLB_BUS_WEIGHTS_DIFF_BG, 0x00000a02},
	{DLB_BUS_WEIGHTS_SAME_BG, 0x09000a01},
	{DLB_BUS_WEIGHTS_RD_WR, 0x00020005},
	{DLB_BUS_WEIGHTS_ATTR_SYS_PRIO, 0x00060f10},
	{DLB_MAIN_QUEUE_MAP, 0x00000543},
	{DLB_LINE_SPLIT, 0x00000000},
	{DLB_USER_COMMAND_REG, 0x00000000},
	{0x0, 0x0}
};

static struct dlb_config ddr3_dlb_config_table_a0[] = {
	{REG_STATIC_DRAM_DLB_CONTROL, 0x2000005c},
	{DLB_BUS_OPTIMIZATION_WEIGHTS_REG, 0x00880000},
	{DLB_AGING_REGISTER, 0x0f7f007f},
	{DLB_EVICTION_CONTROL_REG, 0x0000129f},
	{DLB_EVICTION_TIMERS_REGISTER_REG, 0x00ff0000},
	{DLB_BUS_WEIGHTS_DIFF_CS, 0x04030802},
	{DLB_BUS_WEIGHTS_DIFF_BG, 0x00000a02},
	{DLB_BUS_WEIGHTS_SAME_BG, 0x09000a01},
	{DLB_BUS_WEIGHTS_RD_WR, 0x00020005},
	{DLB_BUS_WEIGHTS_ATTR_SYS_PRIO, 0x00060f10},
	{DLB_MAIN_QUEUE_MAP, 0x00000543},
	{DLB_LINE_SPLIT, 0x00000000},
	{DLB_USER_COMMAND_REG, 0x00000000},
	{0x0, 0x0}
};

#if defined(CONFIG_ARMADA_38X)
struct dram_modes {
	char *mode_name;
	u8 cpu_freq;
	u8 fab_freq;
	u8 chip_id;
	u8 chip_board_rev;
	struct reg_data *regs;
};

struct dram_modes ddr_modes[] = {
#ifdef SUPPORT_STATIC_DUNIT_CONFIG
	/* Conf name, CPU freq, Fab freq, Chip ID, Chip/Board, MC regs */
#ifdef CONFIG_CUSTOMER_BOARD_SUPPORT
	{"a38x_customer_0_800", DDR_FREQ_800, 0, 0x0, A38X_CUSTOMER_BOARD_ID0,
	 ddr3_customer_800},
	{"a38x_customer_1_800", DDR_FREQ_800, 0, 0x0, A38X_CUSTOMER_BOARD_ID1,
	 ddr3_customer_800},
#else
	{"a38x_533", DDR_FREQ_533, 0, 0x0, MARVELL_BOARD, ddr3_a38x_533},
	{"a38x_667", DDR_FREQ_667, 0, 0x0, MARVELL_BOARD, ddr3_a38x_667},
	{"a38x_800", DDR_FREQ_800, 0, 0x0, MARVELL_BOARD, ddr3_a38x_800},
	{"a38x_933", DDR_FREQ_933, 0, 0x0, MARVELL_BOARD, ddr3_a38x_933},
#endif
#endif
};
#endif /* defined(CONFIG_ARMADA_38X) */

/* Translates topology map definitions to real memory size in bits */
u32 mem_size[] = {
	ADDR_SIZE_512MB, ADDR_SIZE_1GB, ADDR_SIZE_2GB, ADDR_SIZE_4GB,
	ADDR_SIZE_8GB
};

static char *ddr_type = "DDR3";

/*
 * Set to 1 to use dynamic DUNIT configuration;
 * set to 0 (supported for A380 and AC3) to configure the DUNIT with the
 * values set by ddr3_tip_init_specific_reg_config.
 */
u8 generic_init_controller = 1;

#ifdef SUPPORT_STATIC_DUNIT_CONFIG
static u32 ddr3_get_static_ddr_mode(void);
#endif
static int ddr3_hws_tune_training_params(u8 dev_num);

/* device revision */
#define DEV_VERSION_ID_REG		0x1823c
#define REVISON_ID_OFFS			8
#define REVISON_ID_MASK			0xf00

/* A38x revisions */
#define MV_88F68XX_Z1_ID		0x0
#define MV_88F68XX_A0_ID		0x4
/* A39x revisions */
#define MV_88F69XX_Z1_ID		0x2

/*
 * sys_env_device_rev_get - Get Marvell controller device revision number
 *
 * DESCRIPTION:
 *	This function returns an 8-bit value describing the device revision
 *	as defined in the Revision ID Register.
 *
 * INPUT:
 *	None.
 *
 * OUTPUT:
 *	None.
 *
 * RETURN:
 *	8-bit value describing the Marvell controller revision number
 */
u8 sys_env_device_rev_get(void)
{
	u32 value;

	value = reg_read(DEV_VERSION_ID_REG);
	return (value & (REVISON_ID_MASK)) >> REVISON_ID_OFFS;
}

/*
 * sys_env_dlb_config_ptr_get
 *
 * DESCRIPTION: Returns a pointer to the DLB configuration table
 *
 * INPUT: none
 *
 * OUTPUT: pointer to the DLB configuration table
 *
 * RETURN:
 *	returns a pointer to the DLB configuration table
 */
struct dlb_config *sys_env_dlb_config_ptr_get(void)
{
#ifdef CONFIG_ARMADA_39X
	return &ddr3_dlb_config_table_a0[0];
#else
	if (sys_env_device_rev_get() == MV_88F68XX_A0_ID)
		return &ddr3_dlb_config_table_a0[0];
	else
		return &ddr3_dlb_config_table[0];
#endif
}

/*
 * sys_env_get_cs_ena_from_reg
 *
 * DESCRIPTION: Get bit mask of enabled CS
 *
 * INPUT: None
 *
 * OUTPUT: None
 *
 * RETURN:
 *	Bit mask of enabled CS, 1 if only CS0 enabled,
 *	3 if both CS0 and CS1 enabled
 */
u32 sys_env_get_cs_ena_from_reg(void)
{
	return reg_read(REG_DDR3_RANK_CTRL_ADDR) &
		REG_DDR3_RANK_CTRL_CS_ENA_MASK;
}

static void ddr3_restore_and_set_final_windows(u32 *win)
{
	u32 win_ctrl_reg, num_of_win_regs;
	u32 cs_ena = sys_env_get_cs_ena_from_reg();
	u32 ui;

	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
	num_of_win_regs = 16;

	/* Restore XBAR windows 4-7 or 16-19 init configuration */
	for (ui = 0; ui < num_of_win_regs; ui++)
		reg_write((win_ctrl_reg + 0x4 * ui), win[ui]);

	printf("%s Training Sequence - Switching XBAR Window to FastPath Window\n",
	       ddr_type);

#if defined DYNAMIC_CS_SIZE_CONFIG
	if (ddr3_fast_path_dynamic_cs_size_config(cs_ena) != MV_OK)
		printf("ddr3_fast_path_dynamic_cs_size_config FAILED\n");
#else
	u32 reg, cs;
	reg = 0x1fffffe1;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			reg |= (cs << 2);
			break;
		}
	}
	/* Open fast path window to 0.5G */
	reg_write(REG_FASTPATH_WIN_0_CTRL_ADDR, reg);
#endif
}

static int ddr3_save_and_set_training_windows(u32 *win)
{
	u32 cs_ena;
	u32 reg, tmp_count, cs, ui;
	u32 win_ctrl_reg, win_base_reg, win_remap_reg;
	u32 num_of_win_regs, win_jump_index;
	win_ctrl_reg = REG_XBAR_WIN_4_CTRL_ADDR;
	win_base_reg = REG_XBAR_WIN_4_BASE_ADDR;
	win_remap_reg = REG_XBAR_WIN_4_REMAP_ADDR;
	win_jump_index = 0x10;
	num_of_win_regs = 16;
	struct hws_topology_map *tm = ddr3_get_topology_map();

#ifdef DISABLE_L2_FILTERING_DURING_DDR_TRAINING
	/*
	 * Disable L2 filtering during DDR training
	 * (when Cross Bar window is open)
	 */
	reg_write(ADDRESS_FILTERING_END_REGISTER, 0);
#endif

	cs_ena = tm->interface_params[0].as_bus_params[0].cs_bitmask;

	/* Close XBAR Window 19 - Not needed */
	/* {0x000200e8} - Open Mbus Window - 2G */
	reg_write(REG_XBAR_WIN_19_CTRL_ADDR, 0);

	/* Save XBAR Windows 4-19 init configurations */
	for (ui = 0; ui < num_of_win_regs; ui++)
		win[ui] = reg_read(win_ctrl_reg + 0x4 * ui);

	/* Open XBAR Windows 4-7 or 16-19 for other CS */
	reg = 0;
	tmp_count = 0;
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
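			/*
			 * The window attribute values below appear to follow
			 * the usual Marvell MBus DRAM target attribute
			 * encoding (inverted one-hot CS select):
			 * 0x0e = CS0, 0x0d = CS1, 0x0b = CS2, 0x07 = CS3.
			 */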
			switch (cs) {
			case 0:
				reg = 0x0e00;
				break;
			case 1:
				reg = 0x0d00;
				break;
			case 2:
				reg = 0x0b00;
				break;
			case 3:
				reg = 0x0700;
				break;
			}
			reg |= (1 << 0);
			reg |= (SDRAM_CS_SIZE & 0xffff0000);

			reg_write(win_ctrl_reg + win_jump_index * tmp_count,
				  reg);
			reg = (((SDRAM_CS_SIZE + 1) * (tmp_count)) &
			       0xffff0000);
			reg_write(win_base_reg + win_jump_index * tmp_count,
				  reg);

			if (win_remap_reg <= REG_XBAR_WIN_7_REMAP_ADDR)
				reg_write(win_remap_reg +
					  win_jump_index * tmp_count, 0);

			tmp_count++;
		}
	}

	return MV_OK;
}

/*
 * Name: ddr3_init - Main DDR3 Init function
 * Desc: This routine initializes the DDR3 MC and runs HW training.
 * Args: None.
 * Notes:
 * Returns: MV_OK on success, error code otherwise.
 */
int ddr3_init(void)
{
	u32 reg = 0;
	u32 soc_num;
	int status;
	u32 win[16];

	/* SoC/Board special initializations */
	/* Get version from internal library */
	ddr3_print_version();

	/* Add sub_version string */
	DEBUG_INIT_C("", SUB_VERSION, 1);

	/* Switching CPU to MRVL ID */
	soc_num = (reg_read(REG_SAMPLE_RESET_HIGH_ADDR) & SAR1_CPU_CORE_MASK) >>
		SAR1_CPU_CORE_OFFSET;
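	/*
	 * The switch cases below intentionally fall through, so that on a
	 * multi-core part the Marvell ID bit is set for every CPU from the
	 * highest-numbered core down to core 0.
	 */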
	switch (soc_num) {
	case 0x3:
		reg_bit_set(CPU_CONFIGURATION_REG(3), CPU_MRVL_ID_OFFSET);
		reg_bit_set(CPU_CONFIGURATION_REG(2), CPU_MRVL_ID_OFFSET);
	case 0x1:
		reg_bit_set(CPU_CONFIGURATION_REG(1), CPU_MRVL_ID_OFFSET);
	case 0x0:
		reg_bit_set(CPU_CONFIGURATION_REG(0), CPU_MRVL_ID_OFFSET);
	default:
		break;
	}

	/*
	 * Set DRAM Reset Mask in case of a detected GPIO indication of wakeup
	 * from suspend, i.e. the DRAM values will not be overwritten / reset
	 * when waking from suspend
	 */
	if (sys_env_suspend_wakeup_check() ==
	    SUSPEND_WAKEUP_ENABLED_GPIO_DETECTED) {
		reg_bit_set(REG_SDRAM_INIT_CTRL_ADDR,
			    1 << REG_SDRAM_INIT_RESET_MASK_OFFS);
	}

	/*
	 * Stage 0 - Set board configuration
	 */

	/* Check if DRAM is already initialized */
	if (reg_read(REG_BOOTROM_ROUTINE_ADDR) &
	    (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS)) {
		printf("%s Training Sequence - 2nd boot - Skip\n", ddr_type);
		return MV_OK;
	}

	/*
	 * Stage 1 - Dunit Setup
	 */

	/* Fix read ready phases for all SoCs in reg 0x15c8 */
	reg = reg_read(REG_TRAINING_DEBUG_3_ADDR);
	reg &= ~(REG_TRAINING_DEBUG_3_MASK);
	reg |= 0x4;	/* Phase 0 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << REG_TRAINING_DEBUG_3_OFFS);
	reg |= (0x4 << (1 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 1 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (3 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (3 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 3 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (4 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (4 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 4 */
	reg &= ~(REG_TRAINING_DEBUG_3_MASK << (5 * REG_TRAINING_DEBUG_3_OFFS));
	reg |= (0x6 << (5 * REG_TRAINING_DEBUG_3_OFFS));	/* Phase 5 */
	reg_write(REG_TRAINING_DEBUG_3_ADDR, reg);

	/*
	 * Axi_bresp_mode[8] = Compliant,
	 * Axi_addr_decode_cntrl[11] = Internal,
	 * Axi_data_bus_width[0] = 128bit
	 */
	/* 0x14a8 - AXI Control Register */
	reg_write(REG_DRAM_AXI_CTRL_ADDR, 0);

	/*
	 * Stage 2 - Training Values Setup
	 */
	/* Set X-BAR windows for the training sequence */
	ddr3_save_and_set_training_windows(win);

#ifdef SUPPORT_STATIC_DUNIT_CONFIG
	/*
	 * Load static controller configuration (in case dynamic/generic init
	 * is not enabled)
	 */
	if (generic_init_controller == 0) {
		ddr3_tip_init_specific_reg_config(0,
			ddr_modes[ddr3_get_static_ddr_mode()].regs);
	}
#endif

	/* Tune training algorithm parameters */
	status = ddr3_hws_tune_training_params(0);
	if (MV_OK != status)
		return status;

	/* Set log level for training lib */
	ddr3_hws_set_log_level(DEBUG_BLOCK_ALL, DEBUG_LEVEL_ERROR);

	/* Start New Training IP */
	status = ddr3_hws_hw_training();
	if (MV_OK != status) {
		printf("%s Training Sequence - FAILED\n", ddr_type);
		return status;
	}

	/*
	 * Stage 3 - Finish
	 */
	/* Restore and set windows */
	ddr3_restore_and_set_final_windows(win);

	/* Update DRAM init indication in bootROM register */
	reg = reg_read(REG_BOOTROM_ROUTINE_ADDR);
	reg_write(REG_BOOTROM_ROUTINE_ADDR,
		  reg | (1 << REG_BOOTROM_ROUTINE_DRAM_INIT_OFFS));

	/* DLB config */
	ddr3_new_tip_dlb_config();

#if defined(ECC_SUPPORT)
	if (ddr3_if_ecc_enabled())
		ddr3_new_tip_ecc_scrub();
#endif

	printf("%s Training Sequence - Ended Successfully\n", ddr_type);

	return MV_OK;
}

/*
 * Name: ddr3_get_cpu_freq
 * Desc: read S@R and return CPU frequency
 * Args:
 * Notes:
 * Returns: required value
 */
u32 ddr3_get_cpu_freq(void)
{
	return ddr3_tip_get_init_freq();
}

/*
 * Name: ddr3_get_fab_opt
 * Desc: read S@R and return the fabric frequency option
 * Args:
 * Notes:
 * Returns: required value
 */
u32 ddr3_get_fab_opt(void)
{
	return 0;	/* No fabric */
}

/*
 * Name: ddr3_get_static_mc_value - Helper for static Memory Controller
 *       configuration
 * Desc: Read the register at reg_addr and extract one or two bit fields
 *       from it using the given offsets and masks.
 * Args: reg_addr, offset1, mask1, offset2, mask2.
 * Notes:
 * Returns: The extracted value.
 */
u32 ddr3_get_static_mc_value(u32 reg_addr, u32 offset1, u32 mask1,
			     u32 offset2, u32 mask2)
{
	u32 reg, temp;

	reg = reg_read(reg_addr);

	temp = (reg >> offset1) & mask1;
	if (mask2)
		temp |= (reg >> offset2) & mask2;

	return temp;
}

/*
 * Name: ddr3_get_static_ddr_mode - Get the static DDR mode for Memory
 *       controller configuration with static parameters
 * Desc: Find the ddr_modes[] entry matching the current CPU frequency,
 *       fabric option and board revision. Used to init the controller
 *       without the HW training procedure; the user must provide a
 *       compatible header file with register data.
 * Args: None.
 * Notes:
 * Returns: Index into ddr_modes[], or 0 if no match is found.
 */
u32 ddr3_get_static_ddr_mode(void)
{
	u32 chip_board_rev, i;
	u32 size;

	/* Valid for A380 only, MSYS is using dynamic controller config */
#ifdef CONFIG_CUSTOMER_BOARD_SUPPORT
	/*
	 * Customer boards select DDR mode according to
	 * board ID & Sample@Reset
	 */
	chip_board_rev = mv_board_id_get();
#else
	/* Marvell boards select DDR mode according to Sample@Reset only */
	chip_board_rev = MARVELL_BOARD;
#endif

	size = ARRAY_SIZE(ddr_modes);
	for (i = 0; i < size; i++) {
		if ((ddr3_get_cpu_freq() == ddr_modes[i].cpu_freq) &&
		    (ddr3_get_fab_opt() == ddr_modes[i].fab_freq) &&
		    (chip_board_rev == ddr_modes[i].chip_board_rev))
			return i;
	}

	DEBUG_INIT_S("\n*** Error: ddr3_get_static_ddr_mode: No match for requested DDR mode. ***\n\n");

	return 0;
}

/******************************************************************************
 * Name: ddr3_get_cs_num_from_reg
 * Desc: Count the number of enabled chip selects
 * Args:
 * Notes:
 * Returns: Number of enabled chip selects
 */
u32 ddr3_get_cs_num_from_reg(void)
{
	u32 cs_ena = sys_env_get_cs_ena_from_reg();
	u32 cs_count = 0;
	u32 cs;

	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs))
			cs_count++;
	}

	return cs_count;
}

void get_target_freq(u32 freq_mode, u32 *ddr_freq, u32 *hclk_ps)
{
	u32 tmp, hclk = 200;

	switch (freq_mode) {
	case 4:
		tmp = 1;	/* DDR_400; */
		hclk = 200;
		break;
	case 0x8:
		tmp = 1;	/* DDR_666; */
		hclk = 333;
		break;
	case 0xc:
		tmp = 1;	/* DDR_800; */
		hclk = 400;
		break;
	default:
		/*
		 * Unknown freq_mode: report zero values and bail out, so the
		 * uninitialized tmp is never used below.
		 */
		*ddr_freq = 0;
		*hclk_ps = 0;
		return;
	}
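
	/*
	 * hclk is in MHz, so 1000000 / hclk is the HCLK period in
	 * picoseconds; e.g. hclk = 333 gives roughly 3003 ps.
	 */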

	*ddr_freq = tmp;		/* DDR freq define */
	*hclk_ps = 1000000 / hclk;	/* values are 1/HCLK in ps */

	return;
}

void ddr3_new_tip_dlb_config(void)
{
	u32 reg, i = 0;
	struct dlb_config *config_table_ptr = sys_env_dlb_config_ptr_get();

	/* Write the configuration */
	while (config_table_ptr[i].reg_addr != 0) {
		reg_write(config_table_ptr[i].reg_addr,
			  config_table_ptr[i].reg_data);
		i++;
	}

	/* Enable DLB */
	reg = reg_read(REG_STATIC_DRAM_DLB_CONTROL);
	reg |= DLB_ENABLE | DLB_WRITE_COALESING | DLB_AXI_PREFETCH_EN |
		DLB_MBUS_PREFETCH_EN | PREFETCH_N_LN_SZ_TR;
	reg_write(REG_STATIC_DRAM_DLB_CONTROL, reg);
}

int ddr3_fast_path_dynamic_cs_size_config(u32 cs_ena)
{
	u32 reg, cs;
	u32 mem_total_size = 0;
	u32 cs_mem_size = 0;
	u32 mem_total_size_c, cs_mem_size_c;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
	u32 physical_mem_size;
	u32 max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE;
	struct hws_topology_map *tm = ddr3_get_topology_map();
#endif

	/* Open fast path windows */
	for (cs = 0; cs < MAX_CS; cs++) {
		if (cs_ena & (1 << cs)) {
			/* get CS size */
			if (ddr3_calc_mem_cs_size(cs, &cs_mem_size) != MV_OK)
				return MV_FAIL;

#ifdef DEVICE_MAX_DRAM_ADDRESS_SIZE
			/*
			 * If the number of address pins doesn't allow using
			 * the max memory size defined in the topology, the
			 * memory size is limited by
			 * DEVICE_MAX_DRAM_ADDRESS_SIZE
			 */
			physical_mem_size = mem_size
				[tm->interface_params[0].memory_size];

			if (ddr3_get_device_width(cs) == 16) {
				/*
				 * A 16-bit memory device can address twice
				 * as much - the least significant pin is
				 * not needed
				 */
				max_mem_size = DEVICE_MAX_DRAM_ADDRESS_SIZE * 2;
			}

			if (physical_mem_size > max_mem_size) {
				cs_mem_size = max_mem_size *
					(ddr3_get_bus_width() /
					 ddr3_get_device_width(cs));
				printf("Updated physical mem size from 0x%x to 0x%x\n",
				       physical_mem_size,
				       DEVICE_MAX_DRAM_ADDRESS_SIZE);
			}
#endif

			/* Set fast path window control for the cs */
			reg = 0xffffe1;
			reg |= (cs << 2);
			reg |= (cs_mem_size - 1) & 0xffff0000;
			/* Open fast path window */
			reg_write(REG_FASTPATH_WIN_CTRL_ADDR(cs), reg);

			/* Set fast path window base address for the cs */
			reg = ((cs_mem_size) * cs) & 0xffff0000;
			/* Set base address */
			reg_write(REG_FASTPATH_WIN_BASE_ADDR(cs), reg);

			/*
			 * Since the memory size may be bigger than 4G, the
			 * sum may overflow a 32-bit word, so to estimate the
			 * result divide mem_total_size and cs_mem_size by
			 * 0x10000 (which is equal to >> 16)
			 */
			mem_total_size_c = mem_total_size >> 16;
			cs_mem_size_c = cs_mem_size >> 16;
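			/*
			 * For example, with two 2 GiB chip selects the second
			 * iteration sees 0x8000 + 0x8000 = 0x10000 here, which
			 * is not below 0x10000, so the total is clamped to
			 * L2_FILTER_FOR_MAX_MEMORY_SIZE.
			 */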
			/* If the sum is less than 2 G - calculate the value */
			if (mem_total_size_c + cs_mem_size_c < 0x10000)
				mem_total_size += cs_mem_size;
			else	/* Put max possible size */
				mem_total_size = L2_FILTER_FOR_MAX_MEMORY_SIZE;
		}
	}

	/* Set L2 filtering to Max Memory size */
	reg_write(ADDRESS_FILTERING_END_REGISTER, mem_total_size);

	return MV_OK;
}

u32 ddr3_get_bus_width(void)
{
	u32 bus_width;

	bus_width = (reg_read(REG_SDRAM_CONFIG_ADDR) & 0x8000) >>
		REG_SDRAM_CONFIG_WIDTH_OFFS;

	return (bus_width == 0) ? 16 : 32;
}

u32 ddr3_get_device_width(u32 cs)
{
	u32 device_width;

	device_width = (reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR) &
			(0x3 << (REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs))) >>
		(REG_SDRAM_ADDRESS_CTRL_STRUCT_OFFS * cs);

	return (device_width == 0) ? 8 : 16;
}

float ddr3_get_device_size(u32 cs)
{
	u32 device_size_low, device_size_high, device_size;
	u32 data, cs_low_offset, cs_high_offset;

	cs_low_offset = REG_SDRAM_ADDRESS_SIZE_OFFS + cs * 4;
	cs_high_offset = REG_SDRAM_ADDRESS_SIZE_OFFS +
		REG_SDRAM_ADDRESS_SIZE_HIGH_OFFS + cs;

	data = reg_read(REG_SDRAM_ADDRESS_CTRL_ADDR);
	device_size_low = (data >> cs_low_offset) & 0x3;
	device_size_high = (data >> cs_high_offset) & 0x1;

	device_size = device_size_low | (device_size_high << 2);

	switch (device_size) {
	case 0:
		return 2;
	case 2:
		return 0.5;
	case 3:
		return 1;
	case 4:
		return 4;
	case 5:
		return 8;
	case 1:
	default:
		DEBUG_INIT_C("Error: Wrong device size of Cs: ", cs, 1);
		/*
		 * A small value will give a wrong mem size in
		 * ddr3_calc_mem_cs_size
		 */
		return 0.01;
	}
}

int ddr3_calc_mem_cs_size(u32 cs, u32 *cs_size)
{
	float cs_mem_size;

	/* Calculate in GiB */
	cs_mem_size = ((ddr3_get_bus_width() / ddr3_get_device_width(cs)) *
		       ddr3_get_device_size(cs)) / 8;
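
	/*
	 * For example, a 32-bit bus populated with 16-bit devices of 4 Gbit
	 * each gives (32 / 16) * 4 / 8 = 1 GiB per chip select, before the
	 * controller bus width multiplier below is applied.
	 */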

	/*
	 * Multiply by the controller bus width multiplier, 2x for 64 bit
	 * (the SoC controller may be 32 or 64 bit, so bit 15 in reg 0x1400,
	 * which tells whether the whole bus or only half of it is used,
	 * has a different meaning)
	 */
	cs_mem_size *= DDR_CONTROLLER_BUS_WIDTH_MULTIPLIER;

	if (cs_mem_size == 0.125) {
		*cs_size = 128 << 20;
	} else if (cs_mem_size == 0.25) {
		*cs_size = 256 << 20;
	} else if (cs_mem_size == 0.5) {
		*cs_size = 512 << 20;
	} else if (cs_mem_size == 1) {
		*cs_size = 1 << 30;
	} else if (cs_mem_size == 2) {
		*cs_size = 2 << 30;
	} else {
		DEBUG_INIT_C("Error: Wrong Memory size of Cs: ", cs, 1);
		return MV_BAD_VALUE;
	}

	return MV_OK;
}

/*
 * Name: ddr3_hws_tune_training_params
 * Desc:
 * Args:
 * Notes: Tune internal training params
 * Returns:
 */
static int ddr3_hws_tune_training_params(u8 dev_num)
{
	struct tune_train_params params;
	int status;

	/* NOTE: do not remove any field initialization */
	params.ck_delay = TUNE_TRAINING_PARAMS_CK_DELAY;
	params.ck_delay_16 = TUNE_TRAINING_PARAMS_CK_DELAY_16;
	params.p_finger = TUNE_TRAINING_PARAMS_PFINGER;
	params.n_finger = TUNE_TRAINING_PARAMS_NFINGER;
	params.phy_reg3_val = TUNE_TRAINING_PARAMS_PHYREG3VAL;

	status = ddr3_tip_tune_training_params(dev_num, &params);
	if (MV_OK != status) {
		printf("%s Training Sequence - FAILED\n", ddr_type);
		return status;
	}

	return MV_OK;
}