blob: e5c80fb83b3f3019c8222287fde1a686e9e008b7 [file] [log] [blame]
Kever Yang9c88b242019-11-15 11:04:40 +08001// SPDX-License-Identifier: GPL-2.0
2/*
3 * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
4 */
5
6#include <common.h>
7#include <ram.h>
8#include <asm/io.h>
9#include <asm/arch-rockchip/sdram.h>
10#include <asm/arch-rockchip/sdram_pctl_px30.h>
Simon Glassdbd79542020-05-10 11:40:11 -060011#include <linux/delay.h>
Kever Yang9c88b242019-11-15 11:04:40 +080012
/*
 * Start a mode-register read through the controller's software MR
 * interface and wait for it to complete.
 *
 * rank = 1: cs0
 * rank = 2: cs1
 *
 * NOTE(review): register/bit semantics below follow the DW uMCTL2
 * convention implied by this file's usage — confirm against the
 * controller databook.
 */
void pctl_read_mr(void __iomem *pctl_base, u32 rank, u32 mr_num)
{
	/* MRCTRL0: select target rank in [5:4], bit 0 marks an MR read */
	writel((rank << 4) | (1 << 0), pctl_base + DDR_PCTL2_MRCTRL0);
	/* MRCTRL1: mode-register number in bits [15:8] */
	writel((mr_num << 8), pctl_base + DDR_PCTL2_MRCTRL1);
	/* MRCTRL0 bit 31: kick off the transaction */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	/* hardware clears the trigger bit once the request is accepted */
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	/* then wait for the controller to report the MR access done */
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & PCTL2_MR_WR_BUSY)
		continue;
}
27
/*
 * Write a DRAM mode register via the controller's software MR interface.
 *
 * rank = 1: cs0
 * rank = 2: cs1
 * rank = 3: cs0 & cs1
 * note: be careful to keep the MR's original value — for registers with
 * read-modify-write semantics the caller must fold the old contents
 * into @arg.
 *
 * Always returns 0.
 */
int pctl_write_mr(void __iomem *pctl_base, u32 rank, u32 mr_num, u32 arg,
		  u32 dramtype)
{
	/* make sure no earlier MR access is still in flight */
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & PCTL2_MR_WR_BUSY)
		continue;
	if (dramtype == DDR3 || dramtype == DDR4) {
		/* DDR3/DDR4: MR number in MRCTRL0[15:12], full value in MRCTRL1 */
		writel((mr_num << 12) | (rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel(arg, pctl_base + DDR_PCTL2_MRCTRL1);
	} else {
		/* LPDDR types: MR number in MRCTRL1[15:8], 8-bit operand in [7:0] */
		writel((rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel((mr_num << 8) | (arg & 0xff),
		       pctl_base + DDR_PCTL2_MRCTRL1);
	}

	/* MRCTRL0 bit 31: trigger; hardware clears it when accepted */
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	/* wait for the write itself to finish */
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & PCTL2_MR_WR_BUSY)
		continue;

	return 0;
}
57
/*
 * Program the DRAM-side DQ reference voltage via the DDR4 MR6 VrefDQ
 * training sequence.
 *
 * rank :    1:cs0, 2:cs1, 3:cs0&cs1
 * vrefrate: target Vref in hundredths of a percent, e.g. 4500 = 45.00%
 *
 * Returns 0 on success, -1 if dramtype is not DDR4 or vrefrate is
 * outside the supported 4500..9200 window.
 */
int pctl_write_vrefdq(void __iomem *pctl_base, u32 rank, u32 vrefrate,
		      u32 dramtype)
{
	u32 tccd_l, value;
	u32 dis_auto_zq = 0;

	/* VrefDQ training only exists on DDR4; bound the requested rate */
	if (dramtype != DDR4 || vrefrate < 4500 ||
	    vrefrate > 9200)
		return (-1);

	/* MR6[12:10] carries tCCD_L - 4; recover tCCD_L from DRAMTMG4[19:16] */
	tccd_l = (readl(pctl_base + DDR_PCTL2_DRAMTMG4) >> 16) & 0xf;
	tccd_l = (tccd_l - 4) << 10;

	if (vrefrate > 7500) {
		/* range 1: 60.00% base, 0.65% per step */
		value = ((vrefrate - 6000) / 65) | tccd_l;
	} else {
		/* range 2 (MR6 bit 6 set): 45.00% base, 0.65% per step */
		value = ((vrefrate - 4500) / 65) | tccd_l | (1 << 6);
	}

	/* training must not be interrupted by auto ZQCS / auto refresh */
	dis_auto_zq = pctl_dis_zqcs_aref(pctl_base);

	/* enable vrefdq calibration (MR6 bit 7 = training enable) */
	pctl_write_mr(pctl_base, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvrefdqe */
	/* write vrefdq value while still in training mode */
	pctl_write_mr(pctl_base, rank, 6, value | (1 << 7), dramtype);
	udelay(1);/* tvref_time */
	/* exit training mode, latching the new Vref */
	pctl_write_mr(pctl_base, rank, 6, value | (0 << 7), dramtype);
	udelay(1);/* tvrefdqx */

	pctl_rest_zqcs_aref(pctl_base, dis_auto_zq);

	return 0;
}
98
99static int upctl2_update_ref_reg(void __iomem *pctl_base)
100{
101 u32 ret;
102
103 ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
104 writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);
105
106 return 0;
107}
108
109u32 pctl_dis_zqcs_aref(void __iomem *pctl_base)
110{
111 u32 dis_auto_zq = 0;
112
113 /* disable zqcs */
114 if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
115 (1ul << 31))) {
116 dis_auto_zq = 1;
117 setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
118 }
119
120 /* disable auto refresh */
121 setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
122
123 upctl2_update_ref_reg(pctl_base);
124
125 return dis_auto_zq;
126}
127
128void pctl_rest_zqcs_aref(void __iomem *pctl_base, u32 dis_auto_zq)
129{
130 /* restore zqcs */
131 if (dis_auto_zq)
132 clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1 << 31);
133
134 /* restore auto refresh */
135 clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);
136
137 upctl2_update_ref_reg(pctl_base);
138}
139
140u32 pctl_remodify_sdram_params(struct ddr_pctl_regs *pctl_regs,
141 struct sdram_cap_info *cap_info,
142 u32 dram_type)
143{
144 u32 tmp = 0, tmp_adr = 0, i;
145
146 for (i = 0; pctl_regs->pctl[i][0] != 0xFFFFFFFF; i++) {
147 if (pctl_regs->pctl[i][0] == 0) {
148 tmp = pctl_regs->pctl[i][1];/* MSTR */
149 tmp_adr = i;
150 }
151 }
152
153 tmp &= ~((3ul << 30) | (3ul << 24) | (3ul << 12));
154
155 switch (cap_info->dbw) {
156 case 2:
157 tmp |= (3ul << 30);
158 break;
159 case 1:
160 tmp |= (2ul << 30);
161 break;
162 case 0:
163 default:
164 tmp |= (1ul << 30);
165 break;
166 }
167
168 /*
169 * If DDR3 or DDR4 MSTR.active_ranks=1,
170 * it will gate memory clock when enter power down.
171 * Force set active_ranks to 3 to workaround it.
172 */
173 if (cap_info->rank == 2 || dram_type == DDR3 ||
174 dram_type == DDR4)
175 tmp |= 3 << 24;
176 else
177 tmp |= 1 << 24;
178
179 tmp |= (2 - cap_info->bw) << 12;
180
181 pctl_regs->pctl[tmp_adr][1] = tmp;
182
183 return 0;
184}
185
186int pctl_cfg(void __iomem *pctl_base, struct ddr_pctl_regs *pctl_regs,
187 u32 sr_idle, u32 pd_idle)
188{
189 u32 i;
190
191 for (i = 0; pctl_regs->pctl[i][0] != 0xFFFFFFFF; i++) {
192 writel(pctl_regs->pctl[i][1],
193 pctl_base + pctl_regs->pctl[i][0]);
194 }
195 clrsetbits_le32(pctl_base + DDR_PCTL2_PWRTMG,
196 (0xff << 16) | 0x1f,
197 ((sr_idle & 0xff) << 16) | (pd_idle & 0x1f));
198
199 clrsetbits_le32(pctl_base + DDR_PCTL2_HWLPCTL,
200 0xfff << 16,
201 5 << 16);
202 /* disable zqcs */
203 setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
204
205 return 0;
206}