blob: b3fb0c47ab2dfba7d393c4a3b9d5717031b030af [file] [log] [blame]
/*
 * DDR Configuration for AM33xx devices.
 *
 * Copyright (C) 2011 Texas Instruments Incorporated - http://www.ti.com/
 *
 * SPDX-License-Identifier: GPL-2.0+
 */
8
9#include <asm/arch/cpu.h>
10#include <asm/arch/ddr_defs.h>
Satyanarayana, Sandhya11784752012-08-09 18:29:57 +000011#include <asm/arch/sys_proto.h>
Chandan Nath98b036e2011-10-14 02:58:24 +000012#include <asm/io.h>
Tom Rini0d654712012-05-29 09:02:15 -070013#include <asm/emif.h>
Chandan Nath98b036e2011-10-14 02:58:24 +000014
/**
 * Base addresses for the two EMIF controller instances.
 * Indexed by the 'nr' argument used throughout this file (0 or 1).
 */
static struct emif_reg_struct *emif_reg[2] = {
				(struct emif_reg_struct *)EMIF4_0_CFG_BASE,
				(struct emif_reg_struct *)EMIF4_1_CFG_BASE};
Chandan Nath98b036e2011-10-14 02:58:24 +000021
/**
 * Base addresses for DDR PHY cmd/data regs, one per EMIF instance.
 */
static struct ddr_cmd_regs *ddr_cmd_reg[2] = {
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR,
				(struct ddr_cmd_regs *)DDR_PHY_CMD_ADDR2};

static struct ddr_data_regs *ddr_data_reg[2] = {
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR,
				(struct ddr_data_regs *)DDR_PHY_DATA_ADDR2};
Chandan Nath98b036e2011-10-14 02:58:24 +000032
33/**
34 * Base address for ddr io control instances
35 */
36static struct ddr_cmdtctrl *ioctrl_reg = {
37 (struct ddr_cmdtctrl *)DDR_CONTROL_BASE_ADDR};
38
Lokesh Vutlaa82d4e12013-12-10 15:02:22 +053039static inline u32 get_mr(int nr, u32 cs, u32 mr_addr)
40{
41 u32 mr;
42
43 mr_addr |= cs << EMIF_REG_CS_SHIFT;
44 writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
45
46 mr = readl(&emif_reg[nr]->emif_lpddr2_mode_reg_data);
47 debug("get_mr: EMIF1 cs %d mr %08x val 0x%x\n", cs, mr_addr, mr);
48 if (((mr & 0x0000ff00) >> 8) == (mr & 0xff) &&
49 ((mr & 0x00ff0000) >> 16) == (mr & 0xff) &&
50 ((mr & 0xff000000) >> 24) == (mr & 0xff))
51 return mr & 0xff;
52 else
53 return mr;
54}
55
56static inline void set_mr(int nr, u32 cs, u32 mr_addr, u32 mr_val)
57{
58 mr_addr |= cs << EMIF_REG_CS_SHIFT;
59 writel(mr_addr, &emif_reg[nr]->emif_lpddr2_mode_reg_cfg);
60 writel(mr_val, &emif_reg[nr]->emif_lpddr2_mode_reg_data);
61}
62
/*
 * Initialize the LPDDR2 mode registers for one chip select of EMIF 'nr'.
 */
static void configure_mr(int nr, u32 cs)
{
	u32 mr_addr;

	/* Wait for device auto-initialization (DAI bit of MR0) to finish. */
	while (get_mr(nr, cs, LPDDR2_MR0) & LPDDR2_MR0_DAI_MASK)
		;
	/*
	 * MR10: I/O calibration command. 0x56 looks like the JESD209-2
	 * "ZQ short calibration" code -- TODO confirm against the memory
	 * part's datasheet.
	 */
	set_mr(nr, cs, LPDDR2_MR10, 0x56);

	/* MR1/MR2: device feature/latency settings; board-specific magic. */
	set_mr(nr, cs, LPDDR2_MR1, 0x43);
	set_mr(nr, cs, LPDDR2_MR2, 0x2);

	/* Repeat the MR2 write with the refresh-enable flag set. */
	mr_addr = LPDDR2_MR2 | EMIF_REG_REFRESH_EN_MASK;
	set_mr(nr, cs, mr_addr, 0x2);
}
77
/*
 * Configure EMIF4D5 registers and MR registers For details about these magic
 * values please see the EMIF registers section of the TRM.
 */
void config_sdram_emif4d5(const struct emif_regs *regs, int nr)
{
	/* Power-management control: magic value per the TRM. */
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl);
	writel(0xA0, &emif_reg[nr]->emif_pwr_mgmt_ctrl_shdw);
	writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);

	/* Board-supplied temperature-alert and read/write leveling setup. */
	writel(regs->temp_alert_config, &emif_reg[nr]->emif_temp_alert_config);
	writel(regs->emif_rd_wr_lvl_rmp_win,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_win);
	writel(regs->emif_rd_wr_lvl_rmp_ctl,
	       &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);
	writel(regs->emif_rd_wr_lvl_ctl, &emif_reg[nr]->emif_rd_wr_lvl_ctl);
	writel(regs->emif_rd_wr_exec_thresh,
	       &emif_reg[nr]->emif_rd_wr_exec_thresh);

	/*
	 * for most SOCs these registers won't need to be changed so only
	 * write to these registers if someone explicitly has set the
	 * register's value.
	 */
	if(regs->emif_cos_config) {
		writel(regs->emif_prio_class_serv_map, &emif_reg[nr]->emif_prio_class_serv_map);
		writel(regs->emif_connect_id_serv_1_map, &emif_reg[nr]->emif_connect_id_serv_1_map);
		writel(regs->emif_connect_id_serv_2_map, &emif_reg[nr]->emif_connect_id_serv_2_map);
		writel(regs->emif_cos_config, &emif_reg[nr]->emif_cos_config);
	}

	/*
	 * Sequence to ensure that the PHY is in a known state prior to
	 * starting hardware leveling. Also acts as to latch some state from
	 * the EMIF into the PHY.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);

	/* Re-enable initialization/refresh before programming the config. */
	clrbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
		     EMIF_REG_INITREF_DIS_MASK);

	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
	/* Mirror the SDRAM config into the secure control-module copy. */
	writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);

	/* Perform hardware leveling for DDR3 */
	if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3) {
		udelay(1000);
		/* Set bit 8 of ext_phy_ctrl_36 (magic per TRM). */
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
		writel(readl(&emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw) |
		       0x100, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_rmp_ctl);

		/* Enable read leveling */
		writel(0x80000000, &emif_reg[nr]->emif_rd_wr_lvl_ctl);

		/*
		 * Enable full read and write leveling. Wait for read and write
		 * leveling bit to clear RDWRLVLFULL_START bit 31
		 *
		 * NOTE(review): this busy-wait has no timeout; a leveling
		 * hang would block boot here.
		 */
		while ((readl(&emif_reg[nr]->emif_rd_wr_lvl_ctl) & 0x80000000)
		       != 0)
			;

		/* Check the timeout register to see if leveling is complete */
		if ((readl(&emif_reg[nr]->emif_status) & 0x70) != 0)
			puts("DDR3 H/W leveling incomplete with errors\n");

	} else {
		/* DDR2 */
		configure_mr(nr, 0);
		configure_mr(nr, 1);
	}
}
157
/**
 * Configure SDRAM
 *
 * Programs the SDRAM config and refresh-control registers of EMIF 'nr'.
 * When a ZQ configuration is supplied, it is written first together with
 * the secure control-module mirror of the SDRAM config.
 */
void config_sdram(const struct emif_regs *regs, int nr)
{
	if (regs->zq_config) {
		writel(regs->zq_config, &emif_reg[nr]->emif_zq_config);
		writel(regs->sdram_config, &cstat->secure_emif_sdram_config);
		writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
		writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	}
	/*
	 * NOTE(review): the repeated ref_ctrl/sdram_config writes below are
	 * unconditional and deliberately placed after the branch; sdram_config
	 * comes last -- presumably to trigger initialization with the final
	 * settings. Do not merge with the writes above without checking the
	 * TRM init sequence.
	 */
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl);
	writel(regs->ref_ctrl, &emif_reg[nr]->emif_sdram_ref_ctrl_shdw);
	writel(regs->sdram_config, &emif_reg[nr]->emif_sdram_config);
}
174
175/**
176 * Set SDRAM timings
177 */
Matt Porter65991ec2013-03-15 10:07:03 +0000178void set_sdram_timings(const struct emif_regs *regs, int nr)
Chandan Nath98b036e2011-10-14 02:58:24 +0000179{
Matt Porter65991ec2013-03-15 10:07:03 +0000180 writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1);
181 writel(regs->sdram_tim1, &emif_reg[nr]->emif_sdram_tim_1_shdw);
182 writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2);
183 writel(regs->sdram_tim2, &emif_reg[nr]->emif_sdram_tim_2_shdw);
184 writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3);
185 writel(regs->sdram_tim3, &emif_reg[nr]->emif_sdram_tim_3_shdw);
Chandan Nath98b036e2011-10-14 02:58:24 +0000186}
187
/*
 * Configure EXT PHY registers for software leveling.
 */
static void ext_phy_settings_swlvl(const struct emif_regs *regs, int nr)
{
	u32 *ext_phy_ctrl_base = 0;
	u32 *emif_ext_phy_ctrl_base = 0;
	__maybe_unused const u32 *ext_phy_ctrl_const_regs;
	u32 i = 0;
	__maybe_unused u32 size;

	/* Treat the consecutive ext_phy_ctrl members as flat u32 arrays. */
	ext_phy_ctrl_base = (u32 *)&(regs->emif_ddr_ext_phy_ctrl_1);
	emif_ext_phy_ctrl_base =
			(u32 *)&(emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);

	/* Configure external phy control timing registers */
	for (i = 0; i < EMIF_EXT_PHY_CTRL_TIMING_REG; i++) {
		writel(*ext_phy_ctrl_base, emif_ext_phy_ctrl_base++);
		/*
		 * Update shadow registers. Note the asymmetric increments:
		 * the destination advances twice per iteration (register,
		 * then its _shdw copy) while the source value advances once,
		 * so each source word is written to both copies.
		 */
		writel(*ext_phy_ctrl_base++, emif_ext_phy_ctrl_base++);
	}

#ifdef CONFIG_AM43XX
	/*
	 * External phy 6-24 registers do not change with ddr frequency.
	 * These only need to be set on DDR2 on AM43xx.
	 */
	emif_get_ext_phy_ctrl_const_regs(&ext_phy_ctrl_const_regs, &size);

	if (!size)
		return;

	/* Continue writing reg + shadow pairs from the constant table. */
	for (i = 0; i < size; i++) {
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
		/* Update shadow registers */
		writel(ext_phy_ctrl_const_regs[i], emif_ext_phy_ctrl_base++);
	}
#endif
}
227
/*
 * Configure EXT PHY registers for hardware leveling.
 */
static void ext_phy_settings_hwlvl(const struct emif_regs *regs, int nr)
{
	/*
	 * Enable hardware leveling on the EMIF. For details about these
	 * magic values please see the EMIF registers section of the TRM.
	 */
	writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1);
	writel(0x08020080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_1_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_22_shdw);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23);
	writel(0x00600020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_23_shdw);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24);
	writel(0x40010080, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_24_shdw);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25);
	writel(0x08102040, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_25_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_26_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_27_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_28_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_29_shdw);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30);
	writel(0x00200020, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_30_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_31_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_32_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_33_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_34_shdw);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35);
	writel(0x00000000, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_35_shdw);
	writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36);
	writel(0x000000FF, &emif_reg[nr]->emif_ddr_ext_phy_ctrl_36_shdw);

	/*
	 * Sequence to ensure that the PHY is again in a known state after
	 * hardware leveling.
	 */
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2411, &emif_reg[nr]->emif_iodft_tlgc);
	writel(0x2011, &emif_reg[nr]->emif_iodft_tlgc);
}
278
/**
 * Configure DDR PHY
 */
void config_ddr_phy(const struct emif_regs *regs, int nr)
{
	/*
	 * Disable initialization and refreshes for now until we
	 * finish programming EMIF regs.
	 * Also set time between rising edge of DDR_RESET to rising
	 * edge of DDR_CKE to > 500us per memory spec.
	 */
#ifndef CONFIG_AM43XX
	setbits_le32(&emif_reg[nr]->emif_sdram_ref_ctrl,
		     EMIF_REG_INITREF_DIS_MASK);
#endif
	/* 0x80003100: magic ref_ctrl value per the TRM. */
	if (regs->zq_config)
		writel(0x80003100, &emif_reg[nr]->emif_sdram_ref_ctrl);

	/* Program PHY control and its shadow with the board value. */
	writel(regs->emif_ddr_phy_ctlr_1,
	       &emif_reg[nr]->emif_ddr_phy_ctrl_1);
	writel(regs->emif_ddr_phy_ctlr_1,
	       &emif_reg[nr]->emif_ddr_phy_ctrl_1_shdw);

	/*
	 * EMIF4D5 additionally needs the external PHY registers set up:
	 * DDR3 parts use the hardware-leveling values, everything else
	 * the software-leveling table.
	 */
	if (get_emif_rev((u32)emif_reg[nr]) == EMIF_4D5) {
		if (emif_sdram_type(regs->sdram_config) == EMIF_SDRAM_TYPE_DDR3)
			ext_phy_settings_hwlvl(regs, nr);
		else
			ext_phy_settings_swlvl(regs, nr);
	}
}
309
310/**
311 * Configure DDR CMD control registers
312 */
Matt Porter65991ec2013-03-15 10:07:03 +0000313void config_cmd_ctrl(const struct cmd_control *cmd, int nr)
Chandan Nath98b036e2011-10-14 02:58:24 +0000314{
Lokesh Vutla303b2672013-12-10 15:02:21 +0530315 if (!cmd)
316 return;
317
Matt Porter65991ec2013-03-15 10:07:03 +0000318 writel(cmd->cmd0csratio, &ddr_cmd_reg[nr]->cm0csratio);
Matt Porter65991ec2013-03-15 10:07:03 +0000319 writel(cmd->cmd0iclkout, &ddr_cmd_reg[nr]->cm0iclkout);
Chandan Nath98b036e2011-10-14 02:58:24 +0000320
Matt Porter65991ec2013-03-15 10:07:03 +0000321 writel(cmd->cmd1csratio, &ddr_cmd_reg[nr]->cm1csratio);
Matt Porter65991ec2013-03-15 10:07:03 +0000322 writel(cmd->cmd1iclkout, &ddr_cmd_reg[nr]->cm1iclkout);
Chandan Nath98b036e2011-10-14 02:58:24 +0000323
Matt Porter65991ec2013-03-15 10:07:03 +0000324 writel(cmd->cmd2csratio, &ddr_cmd_reg[nr]->cm2csratio);
Matt Porter65991ec2013-03-15 10:07:03 +0000325 writel(cmd->cmd2iclkout, &ddr_cmd_reg[nr]->cm2iclkout);
Chandan Nath98b036e2011-10-14 02:58:24 +0000326}
327
328/**
329 * Configure DDR DATA registers
330 */
Matt Porter65991ec2013-03-15 10:07:03 +0000331void config_ddr_data(const struct ddr_data *data, int nr)
Chandan Nath98b036e2011-10-14 02:58:24 +0000332{
Matt Porter65991ec2013-03-15 10:07:03 +0000333 int i;
334
Lokesh Vutla303b2672013-12-10 15:02:21 +0530335 if (!data)
336 return;
337
Matt Porter65991ec2013-03-15 10:07:03 +0000338 for (i = 0; i < DDR_DATA_REGS_NR; i++) {
339 writel(data->datardsratio0,
340 &(ddr_data_reg[nr]+i)->dt0rdsratio0);
341 writel(data->datawdsratio0,
342 &(ddr_data_reg[nr]+i)->dt0wdsratio0);
343 writel(data->datawiratio0,
344 &(ddr_data_reg[nr]+i)->dt0wiratio0);
345 writel(data->datagiratio0,
346 &(ddr_data_reg[nr]+i)->dt0giratio0);
347 writel(data->datafwsratio0,
348 &(ddr_data_reg[nr]+i)->dt0fwsratio0);
349 writel(data->datawrsratio0,
350 &(ddr_data_reg[nr]+i)->dt0wrsratio0);
Matt Porter65991ec2013-03-15 10:07:03 +0000351 }
Chandan Nath98b036e2011-10-14 02:58:24 +0000352}
353
Lokesh Vutla303b2672013-12-10 15:02:21 +0530354void config_io_ctrl(const struct ctrl_ioregs *ioregs)
Chandan Nath98b036e2011-10-14 02:58:24 +0000355{
Lokesh Vutla303b2672013-12-10 15:02:21 +0530356 if (!ioregs)
357 return;
358
359 writel(ioregs->cm0ioctl, &ioctrl_reg->cm0ioctl);
360 writel(ioregs->cm1ioctl, &ioctrl_reg->cm1ioctl);
361 writel(ioregs->cm2ioctl, &ioctrl_reg->cm2ioctl);
362 writel(ioregs->dt0ioctl, &ioctrl_reg->dt0ioctl);
363 writel(ioregs->dt1ioctl, &ioctrl_reg->dt1ioctl);
364#ifdef CONFIG_AM43XX
365 writel(ioregs->dt2ioctrl, &ioctrl_reg->dt2ioctrl);
366 writel(ioregs->dt3ioctrl, &ioctrl_reg->dt3ioctrl);
367 writel(ioregs->emif_sdram_config_ext,
368 &ioctrl_reg->emif_sdram_config_ext);
369#endif
Chandan Nath98b036e2011-10-14 02:58:24 +0000370}