/*
 * Copyright 2018-2023 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <drivers/delay_timer.h>
#include <lib/mmio.h>

#include <dram.h>

void ddr4_mr_write(uint32_t mr, uint32_t data, uint32_t mr_type,
		   uint32_t rank, uint32_t dram_type)
{
	uint32_t val, mr_mirror, data_mirror;

	/*
	 * 1. Poll MRSTAT.mr_wr_busy until it is 0 to make sure
	 * that there is no outstanding MR transaction.
	 */

	/*
	 * ERR050712:
	 * When performing a software driven MR access, the following sequence
	 * must be done automatically before performing other APB register accesses.
	 * 1. Set MRCTRL0.mr_wr=1
	 * 2. Check for MRSTAT.mr_wr_busy=0. If not, go to step (2)
	 * 3. Check for MRSTAT.mr_wr_busy=0 again (for the second time). If not, go to step (2)
	 */
	mmio_setbits_32(DDRC_MRCTRL0(0), BIT(31));

	do {
		while (mmio_read_32(DDRC_MRSTAT(0)) & 0x1) {
			;
		}

	} while (mmio_read_32(DDRC_MRSTAT(0)) & 0x1);

	/*
	 * 2. Write the MRCTRL0.mr_type, MRCTRL0.mr_addr, MRCTRL0.mr_rank
	 * and (for MRWs) MRCTRL1.mr_data to define the MR transaction.
	 */
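	/*
	 * If DIMMCTL.dimm_addr_mirr_en is set and the second rank is targeted,
	 * apply address mirroring for that rank: swap BA0/BA1 in the MR
	 * selector and A3/A4, A5/A6, A7/A8 (plus A11/A13 for DDR4) in the
	 * MR data.
	 */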
	val = mmio_read_32(DDRC_DIMMCTL(0));
	if ((val & 0x2) && (rank == 0x2)) {
		mr_mirror = (mr & 0x4) | ((mr & 0x1) << 1) | ((mr & 0x2) >> 1); /* BA0, BA1 swap */
		if (dram_type == DDRC_DDR4) {
			data_mirror = (data & 0x1607) | ((data & 0x8) << 1) | ((data & 0x10) >> 1) |
				      ((data & 0x20) << 1) | ((data & 0x40) >> 1) | ((data & 0x80) << 1) |
				      ((data & 0x100) >> 1) | ((data & 0x800) << 2) | ((data & 0x2000) >> 2);
		} else {
			data_mirror = (data & 0xfe07) | ((data & 0x8) << 1) | ((data & 0x10) >> 1) |
				      ((data & 0x20) << 1) | ((data & 0x40) >> 1) | ((data & 0x80) << 1) |
				      ((data & 0x100) >> 1);
		}
	} else {
		mr_mirror = mr;
		data_mirror = data;
	}

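	/*
	 * MRCTRL0 as programmed here: mr_type in bit 0, mr_rank at bits [5:4],
	 * mr_addr at bits [15:12]; MRCTRL1 carries the MR data.
	 */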
	mmio_write_32(DDRC_MRCTRL0(0), mr_type | (mr_mirror << 12) | (rank << 4));
	mmio_write_32(DDRC_MRCTRL1(0), data_mirror);

	/*
	 * 3. In a separate APB transaction, write the MRCTRL0.mr_wr to 1.
	 * This bit is self-clearing, and triggers the MR transaction.
	 * The uMCTL2 then asserts the MRSTAT.mr_wr_busy while it performs
	 * the MR transaction to SDRAM, and no further accesses can be
	 * initiated until it is deasserted.
	 */
	mmio_setbits_32(DDRC_MRCTRL0(0), BIT(31));

	while (mmio_read_32(DDRC_MRSTAT(0))) {
		;
	}
}

void dram_cfg_all_mr(struct dram_info *info, uint32_t pstate)
{
	uint32_t num_rank = info->num_rank;
	uint32_t dram_type = info->dram_type;

	/*
	 * 15. Perform MRS commands as required to re-program
	 * timing registers in the SDRAM for the new frequency
	 * (in particular, CL, CWL and WR may need to be changed).
	 */

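	/*
	 * Write MR0..MR5 from the P-state MR table for each active rank,
	 * then MR6, which is kept in entry 7 of the table.
	 */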
	for (int i = 1; i <= num_rank; i++) {
		for (int j = 0; j < 6; j++) {
			ddr4_mr_write(j, info->mr_table[pstate][j], 0, i, dram_type);
		}
		ddr4_mr_write(6, info->mr_table[pstate][7], 0, i, dram_type);
	}
}

void sw_pstate(uint32_t pstate, uint32_t drate)
{
	uint32_t val;

	mmio_write_32(DDRC_SWCTL(0), 0x0);

	/*
	 * Update any registers which may be required to
	 * change for the new frequency.
	 */
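	/*
	 * MSTR2 selects the target frequency set point; MSTR[29]
	 * (frequency_mode) makes the controller use the MSTR2 selection.
	 */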
	mmio_write_32(DDRC_MSTR2(0), pstate);
	mmio_setbits_32(DDRC_MSTR(0), (0x1 << 29));

	/*
	 * Toggle RFSHCTL3.refresh_update_level to allow the
	 * new refresh-related register values to propagate
	 * to the refresh logic.
	 */
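	/* refresh_update_level is bit 1 of RFSHCTL3, hence the 0x2 toggle. */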
	val = mmio_read_32(DDRC_RFSHCTL3(0));
	if (val & 0x2) {
		mmio_write_32(DDRC_RFSHCTL3(0), val & 0xFFFFFFFD);
	} else {
		mmio_write_32(DDRC_RFSHCTL3(0), val | 0x2);
	}

	/*
	 * 19. If required, trigger the initialization in the PHY.
	 * If using the gen2 multiPHY, PLL initialization should
	 * be triggered at this point. See the PHY databook for
	 * details about the frequency change procedure.
	 */
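	/*
	 * DFIMISC[12:8] holds dfi_frequency (the target P-state); bit 5
	 * (dfi_init_start) requests the DFI (re)initialization.
	 */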
	mmio_write_32(DDRC_DFIMISC(0), 0x00000000 | (pstate << 8));
	mmio_write_32(DDRC_DFIMISC(0), 0x00000020 | (pstate << 8));

	/* wait for DFISTAT.dfi_init_complete to go to 0 */
	while (mmio_read_32(DDRC_DFISTAT(0)) & 0x1) {
		;
	}

	/* change the clock to the target frequency */
	dram_clock_switch(drate, false);

	mmio_write_32(DDRC_DFIMISC(0), 0x00000000 | (pstate << 8));

	/* wait for DFISTAT.dfi_init_complete to go back to 1 */
	while (!(mmio_read_32(DDRC_DFISTAT(0)) & 0x1)) {
		;
	}

	/*
	 * When changing frequencies the controller may violate the JEDEC
	 * requirement that no more than 16 refreshes should be issued within
	 * 2*tREFI. These extra refreshes are not expected to cause a problem
	 * in the SDRAM. This issue can be avoided by waiting for at least 2*tREFI
	 * before exiting self-refresh in step 19.
	 */
	udelay(14);

	/* 14. Exit the self-refresh state by setting PWRCTL.selfref_sw = 0. */
	mmio_clrbits_32(DDRC_PWRCTL(0), (1 << 5));

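	/* Wait until STAT no longer reports the SW-driven self-refresh state. */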
	while ((mmio_read_32(DDRC_STAT(0)) & 0x3f) == 0x23) {
		;
	}
}

void ddr4_swffc(struct dram_info *info, unsigned int pstate)
{
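	/*
	 * Target DRAM rate for the requested P-state, taken from the
	 * frequency set point (fsp) table.
	 */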
	uint32_t drate = info->timing_info->fsp_table[pstate];

	/*
	 * 1. Set SWCTL.sw_done to 0 to enable quasi-dynamic register
	 * programming outside reset.
	 */
	mmio_write_32(DDRC_SWCTL(0), 0x0);

	/*
	 * 2. Write 0 to PCTRL_n.port_en. This blocks AXI port(s)
	 * from taking any transaction (blocks traffic on AXI ports).
	 */
	mmio_write_32(DDRC_PCTRL_0(0), 0x0);

	/*
	 * 3. Poll PSTAT.rd_port_busy_n=0 and PSTAT.wr_port_busy_n=0.
	 * Wait until all AXI ports are idle (the uMCTL2 core has to
	 * be idle).
	 */
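	/*
	 * Only AXI port 0 is used here: bit 0 is rd_port_busy_0 and bit 16
	 * is wr_port_busy_0, hence the 0x10001 mask.
	 */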
	while (mmio_read_32(DDRC_PSTAT(0)) & 0x10001) {
		;
	}

	/*
	 * 4. Write 0 to SBRCTL.scrub_en. Disable SBR, required only if
	 * SBR instantiated.
	 * 5. Poll SBRSTAT.scrub_busy=0.
	 * 6. Set DERATEEN.derate_enable = 0, if DERATEEN.derate_enable = 1
	 * and the read latency (RL) value needs to change after the frequency
	 * change (LPDDR2/3/4 only).
	 * 7. Set DBG1.dis_hif=1 so that no new commands will be accepted by the uMCTL2.
	 */
	mmio_setbits_32(DDRC_DBG1(0), (0x1 << 1));

	/*
	 * 8. Poll DBGCAM.dbg_wr_q_empty and DBGCAM.dbg_rd_q_empty to ensure
	 * that write and read data buffers are empty.
	 */
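	/* 0x06000000 covers the dbg_rd_q_empty and dbg_wr_q_empty flags (bits 25 and 26). */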
	while ((mmio_read_32(DDRC_DBGCAM(0)) & 0x06000000) != 0x06000000) {
		;
	}

	/*
	 * 9. For DDR4, update MR6 with the new tDLLK value via the Mode
	 * Register Write signals
	 * 10. Set DFILPCFG0.dfi_lp_en_sr = 0, if DFILPCFG0.dfi_lp_en_sr = 1,
	 * and wait until DFISTAT.dfi_lp_ack
	 * 11. If DFI PHY Master interface is active in uMCTL2, then disable it
	 * 12. Wait until STAT.operating_mode[1:0]!=11 indicating that the
	 * controller is not in self-refresh mode.
	 */
	if ((mmio_read_32(DDRC_STAT(0)) & 0x3) == 0x3) {
		VERBOSE("DRAM is in Self Refresh\n");
	}

	/*
	 * 13. Assert PWRCTL.selfref_sw for the DWC_ddr_umctl2 core to enter
	 * the self-refresh mode.
	 */
	mmio_setbits_32(DDRC_PWRCTL(0), (1 << 5));

	/*
	 * 14. Wait until STAT.operating_mode[1:0]==11 indicating that the
	 * controller core is in self-refresh mode.
	 */
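	/*
	 * 0x23 = operating_mode indicating self-refresh with selfref_type
	 * reporting a software-driven entry (STAT[5:0]).
	 */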
	while ((mmio_read_32(DDRC_STAT(0)) & 0x3f) != 0x23) {
		;
	}

	sw_pstate(pstate, drate);
	dram_cfg_all_mr(info, pstate);

	/* 23. Enable HIF commands by setting DBG1.dis_hif=0. */
	mmio_clrbits_32(DDRC_DBG1(0), (0x1 << 1));

	/*
	 * 24. Reset DERATEEN.derate_enable = 1 if DERATEEN.derate_enable
	 * has been set to 0 in step 6.
	 * 25. If DFI PHY Master interface was active before step 11 then
	 * enable it back by programming DFIPHYMSTR.phymstr_en = 1'b1.
	 * 26. Write 1 to PCTRL_n.port_en. AXI port(s) are no longer blocked
	 * from taking transactions (re-enable traffic on AXI ports).
	 */
	mmio_write_32(DDRC_PCTRL_0(0), 0x1);

	/*
	 * 27. Write 1 to SBRCTL.scrub_en. Enable SBR if desired, only
	 * required if SBR instantiated.
	 */

	/*
	 * Set SWCTL.sw_done to 1 to signal that quasi-dynamic register
	 * programming outside reset is complete.
	 */
	mmio_write_32(DDRC_SWCTL(0), 0x1);

	/* wait for SWSTAT.sw_done_ack to be set */
	while (!(mmio_read_32(DDRC_SWSTAT(0)) & 0x1)) {
		;
	}
}