/*
 * DDR Configuration for AM33xx devices.
 *
 * Copyright (C) 2011 Texas Instruments Incorporated - http://www.ti.com/
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#include <asm/arch/cpu.h>
#include <asm/arch/ddr_defs.h>
#include <asm/arch/sys_proto.h>
#include <asm/io.h>
#include <asm/emif.h>

/**
 * Base address for EMIF instances
 */
static struct emif_reg_struct *emif_reg[2] = {
				(struct emif_reg_struct *)EMIF4_0_CFG_BASE,
				(struct emif_reg_struct *)EMIF4_1_CFG_BASE};

/**
 * Base addresses for DDR PHY cmd/data regs
 */
static struct ddr_cmd_regs *ddr_cmd_reg[2] = {
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR,
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR2};

static struct ddr_data_regs *ddr_data_reg[2] = {
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR,
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR2};

/**
 * Base address for DDR IO control instances
 */
static struct ddr_cmdtctrl *ioctrl_reg = {
			(struct ddr_cmdtctrl *)DDR_CONTROL_BASE_ADDR};

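/**
 * get_mr - read an LPDDR2 mode register via the EMIF indirect interface
 *
 * If all four byte lanes report the same value, only that byte is returned;
 * otherwise the raw per-lane data is returned unchanged.
 */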
static inline u32 get_mr(int nr, u32 cs, u32 mr_addr)
{
	u32 mr;

	mr_addr |= cs << EMIF_REG_CS_SHIFT;
	writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);

	mr = readl(&emif_reg[nr]->emif_lpddr2_mode_reg_data);
	debug("get_mr: EMIF1 cs %d mr %08x val 0x%x\n", cs, mr_addr, mr);
	if (((mr & 0x0000ff00) >> 8) == (mr & 0xff) &&
	    ((mr & 0x00ff0000) >> 16) == (mr & 0xff) &&
	    ((mr & 0xff000000) >> 24) == (mr & 0xff))
		return mr & 0xff;
	else
		return mr;
}

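/**
 * set_mr - write an LPDDR2 mode register via the EMIF indirect interface
 */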
static inline void set_mr(int nr, u32 cs, u32 mr_addr, u32 mr_val)
{
	mr_addr |= cs << EMIF_REG_CS_SHIFT;
	writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
	writel(mr_val, &emif_reg[nr]->emif_lpddr2_mode_reg_data);
}

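/**
 * configure_mr - program the LPDDR2 mode registers for one chip select
 *
 * Waits for device auto-initialization (DAI) to finish, then writes the
 * calibration (MR10) and device feature (MR1/MR2) registers, and finally
 * re-issues the MR2 write with the refresh-enable bit set in the MR address.
 */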
static void configure_mr(int nr, u32 cs)
{
	u32 mr_addr;

	while (get_mr(nr, cs, LPDDR2_MR0) & LPDDR2_MR0_DAI_MASK)
		;
	set_mr(nr, cs, LPDDR2_MR10, 0x56);

	set_mr(nr, cs, LPDDR2_MR1, 0x43);
	set_mr(nr, cs, LPDDR2_MR2, 0x2);

	mr_addr = LPDDR2_MR2 | EMIF_REG_REFRESH_EN_MASK;
	set_mr(nr, cs, mr_addr, 0x2);
}

/*
 * Configure EMIF4D5 registers and MR registers. For details about these
 * magic values please see the EMIF registers section of the TRM.
 */
void config_sdram_emif4d5(const struct emif_regs *regs, int nr)
{
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl);
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl_shdw);
	writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);

	writel(regs->temp_alert_config, &emif_reg[nr]->emif_temp_alert_config);
	writel(regs->emif_rd_wr_lvl_rmp_win,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_win);
	writel(regs->emif_rd_wr_lvl_rmp_ctl,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);
	writel(regs->emif_rd_wr_lvl_ctl, &emif_reg[nr]->emif_rd_wr_lvl_ctl);
	writel(regs->emif_rd_wr_exec_thresh,
	       &emif_reg[nr]->emif_rd_wr_exec_thresh);

	/*
	 * For most SoCs these registers won't need to be changed, so only
	 * write to them if someone has explicitly set the register's value.
	 */
	if (regs->emif_cos_config) {
		writel(regs->emif_prio_class_serv_map,
		       &emif_reg[nr]->emif_prio_class_serv_map);
		writel(regs->emif_connect_id_serv_1_map,
		       &emif_reg[nr]->emif_connect_id_serv_1_map);
		writel(regs->emif_connect_id_serv_2_map,
		       &emif_reg[nr]->emif_connect_id_serv_2_map);
		writel(regs->emif_cos_config, &emif_reg[nr]->emif_cos_config);
	}

	/*
	 * Sequence to ensure that the PHY is in a known state prior to
	 * starting hardware leveling. Also acts to latch some state from
	 * the EMIF into the PHY.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);

	clrbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
		     EMIF_REG_INITREF_DIS_MASK);

	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
	writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);

	/* Perform hardware leveling for DDR3 */
	if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3) {
		udelay(1000);
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);

		/* Enable read leveling */
		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_ctl);

		/*
		 * Full read and write leveling is now enabled; wait for the
		 * RDWRLVLFULL_START bit (bit 31) to clear, which indicates
		 * that leveling has finished.
		 */
		while ((readl(&emif_reg[nr]->emif_rd_wr_lvl_ctl) & 0x80000000)
		       != 0)
			;

		/* Check the leveling timeout bits in the status register */
		if ((readl(&emif_reg[nr]->emif_status) & 0x70) != 0)
			puts("DDR3 H/W leveling incomplete with errors\n");

	} else {
		/* DDR2 */
		configure_mr(nr, 0);
		configure_mr(nr, 1);
	}
}

/**
 * Configure SDRAM
 */
void config_sdram(const struct emif_regs *regs, int nr)
{
	if (regs->zq_config) {
		writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);
		writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
		writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);

		/* Trigger initialization */
		writel(0x00003100, &emif_reg[nr]->emif_sdram_ref_ctrl);
		/* Wait 1ms because of L3 timeout error */
		udelay(1000);

		/* Write the proper sdram_ref_ctrl value */
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	}
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
}

/**
 * Set SDRAM timings
 */
void set_sdram_timings(const struct emif_regs *regs, int nr)
{
	writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1);
	writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1_shdw);
	writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2);
	writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2_shdw);
	writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3);
	writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3_shdw);
}

/*
 * Configure EXT PHY registers for software leveling
 */
static void ext_phy_settings_swlvl(const struct emif_regs *regs, int nr)
{
	u32 *ext_phy_ctrl_base = 0;
	u32 *emif_ext_phy_ctrl_base = 0;
	__maybe_unused const u32 *ext_phy_ctrl_const_regs;
	u32 i = 0;
	__maybe_unused u32 size;

	ext_phy_ctrl_base = (u32 *)&(regs->emif_ddr_ext_phy_ctrl_1);
	emif_ext_phy_ctrl_base =
			(u32 *)&(emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);

	/* Configure external phy control timing registers */
	for (i = 0; i < EMIF_EXT_PHY_CTRL_TIMING_REG; i++) {
		writel(*ext_phy_ctrl_base, emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(*ext_phy_ctrl_base++, emif_ext_phy_ctrl_base++);
	}

#ifdef CONFIG_AM43XX
	/*
	 * External phy 6-24 registers do not change with ddr frequency.
	 * These only need to be set on DDR2 on AM43xx.
	 */
	emif_get_ext_phy_ctrl_const_regs(&ext_phy_ctrl_const_regs, &size);

	if (!size)
		return;

	for (i = 0; i < size; i++) {
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
	}
#endif
}

/*
 * Configure EXT PHY registers for hardware leveling
 */
static void ext_phy_settings_hwlvl(const struct emif_regs *regs, int nr)
{
	/*
	 * Enable hardware leveling on the EMIF. For details about these
	 * magic values please see the EMIF registers section of the TRM.
	 */
	writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
	writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22_shdw);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23_shdw);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24_shdw);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35_shdw);
	writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
	writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

	/*
	 * Sequence to ensure that the PHY is again in a known state after
	 * hardware leveling.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
}

/**
 * Configure DDR PHY
 */
void config_ddr_phy(const struct emif_regs *regs, int nr)
{
	/*
	 * Disable initialization and refreshes for now until we
	 * finish programming EMIF regs.
	 * Also set the time from the rising edge of DDR_RESET to the
	 * rising edge of DDR_CKE to > 500us per the memory spec.
	 */
#ifndef CONFIG_AM43XX
	setbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
		     EMIF_REG_INITREF_DIS_MASK);
#endif
	if (regs->zq_config)
		/*
		 * Set the time from the rising edge of DDR_RESET to the
		 * rising edge of DDR_CKE to > 500us per the memory spec.
		 */
		writel(0x00003100, &emif_reg[nr]->emif_sdram_ref_ctrl);

	writel(regs->emif_ddr_phy_ctlr_1,
	       &emif_reg[nr]->emif_ddr_phy_ctrl_1);
	writel(regs->emif_ddr_phy_ctlr_1,
	       &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);

	if (get_emif_rev((u32)emif_reg[nr]) == EMIF_4D5) {
		if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3)
			ext_phy_settings_hwlvl(regs, nr);
		else
			ext_phy_settings_swlvl(regs, nr);
	}
}

/**
 * Configure DDR CMD control registers
 */
void config_cmd_ctrl(const struct cmd_control *cmd, int nr)
{
	if (!cmd)
		return;

	writel(cmd->cmd0csratio, &ddr_cmd_reg[nr]->cm0csratio);
	writel(cmd->cmd0iclkout, &ddr_cmd_reg[nr]->cm0iclkout);

	writel(cmd->cmd1csratio, &ddr_cmd_reg[nr]->cm1csratio);
	writel(cmd->cmd1iclkout, &ddr_cmd_reg[nr]->cm1iclkout);

	writel(cmd->cmd2csratio, &ddr_cmd_reg[nr]->cm2csratio);
	writel(cmd->cmd2iclkout, &ddr_cmd_reg[nr]->cm2iclkout);
}

/**
 * Configure DDR DATA registers
 */
void config_ddr_data(const struct ddr_data *data, int nr)
{
	int i;

	if (!data)
		return;

	for (i = 0; i < DDR_DATA_REGS_NR; i++) {
		writel(data->datardsratio0,
		       &(ddr_data_reg[nr]+i)->dt0rdsratio0);
		writel(data->datawdsratio0,
		       &(ddr_data_reg[nr]+i)->dt0wdsratio0);
		writel(data->datawiratio0,
		       &(ddr_data_reg[nr]+i)->dt0wiratio0);
		writel(data->datagiratio0,
		       &(ddr_data_reg[nr]+i)->dt0giratio0);
		writel(data->datafwsratio0,
		       &(ddr_data_reg[nr]+i)->dt0fwsratio0);
		writel(data->datawrsratio0,
		       &(ddr_data_reg[nr]+i)->dt0wrsratio0);
	}
}

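/**
 * Configure DDR IO control registers
 */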
void config_io_ctrl(const struct ctrl_ioregs *ioregs)
{
	if (!ioregs)
		return;

	writel(ioregs->cm0ioctl, &ioctrl_reg->cm0ioctl);
	writel(ioregs->cm1ioctl, &ioctrl_reg->cm1ioctl);
	writel(ioregs->cm2ioctl, &ioctrl_reg->cm2ioctl);
	writel(ioregs->dt0ioctl, &ioctrl_reg->dt0ioctl);
	writel(ioregs->dt1ioctl, &ioctrl_reg->dt1ioctl);
#ifdef CONFIG_AM43XX
	writel(ioregs->dt2ioctrl, &ioctrl_reg->dt2ioctrl);
	writel(ioregs->dt3ioctrl, &ioctrl_reg->dt3ioctrl);
	writel(ioregs->emif_sdram_config_ext,
	       &ioctrl_reg->emif_sdram_config_ext);
#endif
}
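
/*
 * A sketch of the typical call order (based on how the AM33xx/AM43xx
 * config_ddr() path in emif4.c uses these helpers; the exact sequence is
 * SoC and board specific):
 *
 *	config_cmd_ctrl(ctrl, nr);
 *	config_ddr_data(data, nr);
 *	config_io_ctrl(ioregs);
 *	config_ddr_phy(regs, nr);
 *	set_sdram_timings(regs, nr);
 *	if (get_emif_rev((u32)emif_reg[nr]) == EMIF_4D5)
 *		config_sdram_emif4d5(regs, nr);
 *	else
 *		config_sdram(regs, nr);
 */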