blob: 71636276d4ecef98e3b17f4bc1bed45f33955c4e [file] [log] [blame]
Tom Rini10e47792018-05-06 17:58:06 -04001// SPDX-License-Identifier: GPL-2.0+
Chandan Nath98b036e2011-10-14 02:58:24 +00002/*
3 * DDR Configuration for AM33xx devices.
4 *
Wolfgang Denkd79de1d2013-07-08 09:37:19 +02005 * Copyright (C) 2011 Texas Instruments Incorporated - http://www.ti.com/
Chandan Nath98b036e2011-10-14 02:58:24 +00006 */
7
Simon Glass1e268642020-05-10 11:39:55 -06008#include <common.h>
Simon Glass0f2af882020-05-10 11:40:05 -06009#include <log.h>
Chandan Nath98b036e2011-10-14 02:58:24 +000010#include <asm/arch/cpu.h>
11#include <asm/arch/ddr_defs.h>
Satyanarayana, Sandhya11784752012-08-09 18:29:57 +000012#include <asm/arch/sys_proto.h>
Chandan Nath98b036e2011-10-14 02:58:24 +000013#include <asm/io.h>
Tom Rini0d654712012-05-29 09:02:15 -070014#include <asm/emif.h>
Chandan Nath98b036e2011-10-14 02:58:24 +000015
/**
 * Base addresses for the (up to) two EMIF instances, indexed by the
 * "nr" argument used throughout this file.
 */
static struct emif_reg_struct *emif_reg[2] = {
				(struct emif_reg_struct *)EMIF4_0_CFG_BASE,
				(struct emif_reg_struct *)EMIF4_1_CFG_BASE};

/**
 * Base addresses for DDR PHY cmd/data regs, one entry per EMIF instance.
 */
static struct ddr_cmd_regs *ddr_cmd_reg[2] = {
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR,
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR2};

static struct ddr_data_regs *ddr_data_reg[2] = {
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR,
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR2};
Chandan Nath98b036e2011-10-14 02:58:24 +000033
34/**
35 * Base address for ddr io control instances
36 */
37static struct ddr_cmdtctrl *ioctrl_reg = {
38 (struct ddr_cmdtctrl *)DDR_CONTROL_BASE_ADDR};
39
Lokesh Vutlaa82d4e12013-12-10 15:02:22 +053040static inline u32 get_mr(int nr, u32 cs, u32 mr_addr)
41{
42 u32 mr;
43
44 mr_addr |= cs << EMIF_REG_CS_SHIFT;
45 writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
46
47 mr = readl(&emif_reg[nr]->emif_lpddr2_mode_reg_data);
48 debug("get_mr: EMIF1 cs %d mr %08x val 0x%x\n", cs, mr_addr, mr);
49 if (((mr & 0x0000ff00) >> 8) == (mr & 0xff) &&
50 ((mr & 0x00ff0000) >> 16) == (mr & 0xff) &&
51 ((mr & 0xff000000) >> 24) == (mr & 0xff))
52 return mr & 0xff;
53 else
54 return mr;
55}
56
57static inline void set_mr(int nr, u32 cs, u32 mr_addr, u32 mr_val)
58{
59 mr_addr |= cs << EMIF_REG_CS_SHIFT;
60 writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
61 writel(mr_val, &emif_reg[nr]->emif_lpddr2_mode_reg_data);
62}
63
/**
 * configure_mr() - program the LPDDR2 mode registers for one chip select
 * @nr:	EMIF instance index (0 or 1)
 * @cs:	chip select to configure
 *
 * The MR10/MR1/MR2 values below are fixed configuration constants;
 * presumably standard LPDDR2 calibration/latency settings — confirm
 * against the JEDEC LPDDR2 spec and the board's memory datasheet.
 */
static void configure_mr(int nr, u32 cs)
{
	u32 mr_addr;

	/* Spin until the device auto-initialization (MR0 DAI) flag clears */
	while (get_mr(nr, cs, LPDDR2_MR0) & LPDDR2_MR0_DAI_MASK)
		;
	set_mr(nr, cs, LPDDR2_MR10, 0x56);

	set_mr(nr, cs, LPDDR2_MR1, 0x43);
	set_mr(nr, cs, LPDDR2_MR2, 0x2);

	/* Re-write MR2 with the refresh-enable bit set in the cfg word */
	mr_addr = LPDDR2_MR2 | EMIF_REG_REFRESH_EN_MASK;
	set_mr(nr, cs, mr_addr, 0x2);
}
78
/*
 * Configure EMIF4D5 registers and MR registers For details about these magic
 * values please see the EMIF registers section of the TRM.
 *
 * NOTE(review): the statement order in this function is part of the
 * hardware init sequence (refresh enable, leveling, delays) — do not
 * reorder without consulting the TRM.
 */
void config_sdram_emif4d5(const struct emif_regs *regs, int nr)
{
#ifdef CONFIG_AM43XX
	struct prm_device_inst *prm_device =
			(struct prm_device_inst *)PRM_DEVICE_INST;
#endif

	/* Power-management control: 0xA0 per TRM — confirm field meaning */
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl);
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl_shdw);
	writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);

	writel(regs->temp_alert_config, &emif_reg[nr]->emif_temp_alert_config);
	writel(regs->emif_rd_wr_lvl_rmp_win,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_win);
	writel(regs->emif_rd_wr_lvl_rmp_ctl,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);
	writel(regs->emif_rd_wr_lvl_ctl, &emif_reg[nr]->emif_rd_wr_lvl_ctl);
	writel(regs->emif_rd_wr_exec_thresh,
	       &emif_reg[nr]->emif_rd_wr_exec_thresh);

	/*
	 * for most SOCs these registers won't need to be changed so only
	 * write to these registers if someone explicitly has set the
	 * register's value.
	 */
	if(regs->emif_cos_config) {
		writel(regs->emif_prio_class_serv_map, &emif_reg[nr]->emif_prio_class_serv_map);
		writel(regs->emif_connect_id_serv_1_map, &emif_reg[nr]->emif_connect_id_serv_1_map);
		writel(regs->emif_connect_id_serv_2_map, &emif_reg[nr]->emif_connect_id_serv_2_map);
		writel(regs->emif_cos_config, &emif_reg[nr]->emif_cos_config);
	}

	/*
	 * Sequence to ensure that the PHY is in a known state prior to
	 * startting hardware leveling. Also acts as to latch some state from
	 * the EMIF into the PHY.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);

	/* Re-enable initialization/refresh by clearing the disable bit */
	clrbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
		     EMIF_REG_INITREF_DIS_MASK);

	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
	/* cstat is a control-module pointer defined outside this view —
	 * presumably from sys_proto.h/cpu.h; TODO confirm */
	writel(regs->sdram_config, &cstat->secure_emif_sdram_config);

	/* Wait 1ms because of L3 timeout error */
	udelay(1000);

	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);

#ifdef CONFIG_AM43XX
	/*
	 * Disable EMIF_DEVOFF
	 * -> Cold Boot: This is just rewriting the default register value.
	 * -> RTC Resume: Must disable DEVOFF before leveling.
	 */
	writel(0, &prm_device->emif_ctrl);
#endif

	/* Perform hardware leveling for DDR3 */
	if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3) {
		/* Set bit 8 in ext_phy_ctrl_36 (and shadow) before leveling */
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);

		/* Enable read leveling */
		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_ctl);

		/* Wait 1ms because of L3 timeout error */
		udelay(1000);

		/*
		 * Enable full read and write leveling. Wait for read and write
		 * leveling bit to clear RDWRLVLFULL_START bit 31
		 */
		while ((readl(&emif_reg[nr]->emif_rd_wr_lvl_ctl) & 0x80000000)
		      != 0)
			;

		/* Check the timeout register to see if leveling is complete */
		if ((readl(&emif_reg[nr]->emif_status) & 0x70) != 0)
			puts("DDR3 H/W leveling incomplete with errors\n");

	} else {
		/* DDR2 */
		configure_mr(nr, 0);
		configure_mr(nr, 1);
	}
}
178
/**
 * Configure SDRAM
 *
 * NOTE(review): on non-TI816X parts the double write of ref_ctrl (inside
 * and after the zq_config branch) and the final sdram_config write are
 * part of the documented init ordering — do not "deduplicate".
 */
void config_sdram(const struct emif_regs *regs, int nr)
{
#ifdef CONFIG_TI816X
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
	writel(regs->emif_ddr_phy_ctlr_1, &emif_reg[nr]->emif_ddr_phy_ctrl_1);
	writel(regs->emif_ddr_phy_ctlr_1, &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);
	writel(0x0000613B, &emif_reg[nr]->emif_sdram_ref_ctrl);	/* initially a large refresh period */
	writel(0x1000613B, &emif_reg[nr]->emif_sdram_ref_ctrl);	/* trigger initialization */
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
#else
	if (regs->zq_config) {
		writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);
		/* cstat comes from a header outside this view — TODO confirm */
		writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
		writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

		/* Trigger initialization */
		writel(0x00003100, &emif_reg[nr]->emif_sdram_ref_ctrl);
		/* Wait 1ms because of L3 timeout error */
		udelay(1000);

		/* Write proper sdram_ref_cref_ctrl value */
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	}
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

	/* Write REG_COS_COUNT_1, REG_COS_COUNT_2, and REG_PR_OLD_COUNT. */
	if (regs->ocp_config)
		writel(regs->ocp_config, &emif_reg[nr]->emif_l3_config);
#endif
}
215
216/**
217 * Set SDRAM timings
218 */
Matt Porter65991ec2013-03-15 10:07:03 +0000219void set_sdram_timings(const struct emif_regs *regs, int nr)
Chandan Nath98b036e2011-10-14 02:58:24 +0000220{
Matt Porter65991ec2013-03-15 10:07:03 +0000221 writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1);
222 writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1_shdw);
223 writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2);
224 writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2_shdw);
225 writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3);
226 writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3_shdw);
Chandan Nath98b036e2011-10-14 02:58:24 +0000227}
228
/*
 * Configure EXT PHY registers for software leveling
 *
 * Copies the ext-phy timing values from @regs into the EMIF, writing
 * each source value twice: once to the live register and once to the
 * following shadow register (destination pointer advances twice per
 * source value — the registers are laid out as reg/reg_shdw pairs).
 */
static void ext_phy_settings_swlvl(const struct emif_regs *regs, int nr)
{
	u32 *ext_phy_ctrl_base = 0;
	u32 *emif_ext_phy_ctrl_base = 0;
	__maybe_unused const u32 *ext_phy_ctrl_const_regs;
	u32 i = 0;
	__maybe_unused u32 size;

	ext_phy_ctrl_base = (u32 *)&(regs->emif_ddr_ext_phy_ctrl_1);
	emif_ext_phy_ctrl_base =
			(u32 *)&(emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);

	/* Configure external phy control timing registers */
	for (i = 0; i < EMIF_EXT_PHY_CTRL_TIMING_REG; i++) {
		/* Live register: source pointer intentionally NOT advanced */
		writel(*ext_phy_ctrl_base, emif_ext_phy_ctrl_base++);
		/* Update shadow registers (same value; now advance source) */
		writel(*ext_phy_ctrl_base++, emif_ext_phy_ctrl_base++);
	}

#ifdef CONFIG_AM43XX
	/*
	 * External phy 6-24 registers do not change with ddr frequency.
	 * These only need to be set on DDR2 on AM43xx.
	 */
	emif_get_ext_phy_ctrl_const_regs(&ext_phy_ctrl_const_regs, &size);

	if (!size)
		return;

	for (i = 0; i < size; i++) {
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
	}
#endif
}
268
/*
 * Configure EXT PHY registers for hardware leveling
 *
 * All values below are fixed constants taken from the EMIF section of
 * the TRM; the write order and the paired live/shadow writes are part
 * of the documented sequence.
 */
static void ext_phy_settings_hwlvl(const struct emif_regs *regs, int nr)
{
	/*
	 * Enable hardware leveling on the EMIF. For details about these
	 * magic values please see the EMIF registers section of the TRM.
	 */
	if (regs->emif_ddr_phy_ctlr_1 & 0x00040000) {
		/* PHY_INVERT_CLKOUT = 1 */
		writel(0x00040100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
		writel(0x00040100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
	} else {
		/* PHY_INVERT_CLKOUT = 0 */
		writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
		writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
	}

	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22_shdw);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23_shdw);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24_shdw);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35_shdw);
	writel(0x00000077, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
	writel(0x00000077, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

	/*
	 * Sequence to ensure that the PHY is again in a known state after
	 * hardware leveling.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
}
327
/**
 * Configure DDR PHY
 *
 * On EMIF 4D5 parts, also programs the extended PHY control registers:
 * hardware leveling for DDR3, software leveling otherwise.
 */
void config_ddr_phy(const struct emif_regs *regs, int nr)
{
	/*
	 * Disable initialization and refreshes for now until we finish
	 * programming EMIF regs and set time between rising edge of
	 * DDR_RESET to rising edge of DDR_CKE to > 500us per memory spec.
	 * We currently hardcode a value based on a max expected frequency
	 * of 400MHz.
	 */
	writel(EMIF_REG_INITREF_DIS_MASK | 0x3100,
		&emif_reg[nr]->emif_sdram_ref_ctrl);

	writel(regs->emif_ddr_phy_ctlr_1,
		&emif_reg[nr]->emif_ddr_phy_ctrl_1);
	writel(regs->emif_ddr_phy_ctlr_1,
		&emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);

	/* Extended PHY control registers only exist on EMIF 4D5 */
	if (get_emif_rev((u32)emif_reg[nr]) == EMIF_4D5) {
		if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3)
			ext_phy_settings_hwlvl(regs, nr);
		else
			ext_phy_settings_swlvl(regs, nr);
	}
}
355
356/**
357 * Configure DDR CMD control registers
358 */
Matt Porter65991ec2013-03-15 10:07:03 +0000359void config_cmd_ctrl(const struct cmd_control *cmd, int nr)
Chandan Nath98b036e2011-10-14 02:58:24 +0000360{
Lokesh Vutla303b2672013-12-10 15:02:21 +0530361 if (!cmd)
362 return;
363
Matt Porter65991ec2013-03-15 10:07:03 +0000364 writel(cmd->cmd0csratio, &ddr_cmd_reg[nr]->cm0csratio);
Matt Porter65991ec2013-03-15 10:07:03 +0000365 writel(cmd->cmd0iclkout, &ddr_cmd_reg[nr]->cm0iclkout);
Chandan Nath98b036e2011-10-14 02:58:24 +0000366
Matt Porter65991ec2013-03-15 10:07:03 +0000367 writel(cmd->cmd1csratio, &ddr_cmd_reg[nr]->cm1csratio);
Matt Porter65991ec2013-03-15 10:07:03 +0000368 writel(cmd->cmd1iclkout, &ddr_cmd_reg[nr]->cm1iclkout);
Chandan Nath98b036e2011-10-14 02:58:24 +0000369
Matt Porter65991ec2013-03-15 10:07:03 +0000370 writel(cmd->cmd2csratio, &ddr_cmd_reg[nr]->cm2csratio);
Matt Porter65991ec2013-03-15 10:07:03 +0000371 writel(cmd->cmd2iclkout, &ddr_cmd_reg[nr]->cm2iclkout);
Chandan Nath98b036e2011-10-14 02:58:24 +0000372}
373
374/**
375 * Configure DDR DATA registers
376 */
Matt Porter65991ec2013-03-15 10:07:03 +0000377void config_ddr_data(const struct ddr_data *data, int nr)
Chandan Nath98b036e2011-10-14 02:58:24 +0000378{
Matt Porter65991ec2013-03-15 10:07:03 +0000379 int i;
380
Lokesh Vutla303b2672013-12-10 15:02:21 +0530381 if (!data)
382 return;
383
Matt Porter65991ec2013-03-15 10:07:03 +0000384 for (i = 0; i < DDR_DATA_REGS_NR; i++) {
385 writel(data->datardsratio0,
386 &(ddr_data_reg[nr]+i)->dt0rdsratio0);
387 writel(data->datawdsratio0,
388 &(ddr_data_reg[nr]+i)->dt0wdsratio0);
389 writel(data->datawiratio0,
390 &(ddr_data_reg[nr]+i)->dt0wiratio0);
391 writel(data->datagiratio0,
392 &(ddr_data_reg[nr]+i)->dt0giratio0);
393 writel(data->datafwsratio0,
394 &(ddr_data_reg[nr]+i)->dt0fwsratio0);
395 writel(data->datawrsratio0,
396 &(ddr_data_reg[nr]+i)->dt0wrsratio0);
Matt Porter65991ec2013-03-15 10:07:03 +0000397 }
Chandan Nath98b036e2011-10-14 02:58:24 +0000398}
399
/**
 * config_io_ctrl() - program the DDR I/O control-module registers
 * @ioregs:	per-board I/O control values; NULL means leave defaults
 *
 * AM43xx parts have two extra data-macro I/O controls and an SDRAM
 * config extension register; other SoCs only have the first five.
 */
void config_io_ctrl(const struct ctrl_ioregs *ioregs)
{
	if (!ioregs)
		return;

	writel(ioregs->cm0ioctl, &ioctrl_reg->cm0ioctl);
	writel(ioregs->cm1ioctl, &ioctrl_reg->cm1ioctl);
	writel(ioregs->cm2ioctl, &ioctrl_reg->cm2ioctl);
	writel(ioregs->dt0ioctl, &ioctrl_reg->dt0ioctl);
	writel(ioregs->dt1ioctl, &ioctrl_reg->dt1ioctl);
#ifdef CONFIG_AM43XX
	writel(ioregs->dt2ioctrl, &ioctrl_reg->dt2ioctrl);
	writel(ioregs->dt3ioctrl, &ioctrl_reg->dt3ioctrl);
	writel(ioregs->emif_sdram_config_ext,
	       &ioctrl_reg->emif_sdram_config_ext);
#endif
}
Chandan Nath98b036e2011-10-14 02:58:24 +0000416}