// SPDX-License-Identifier: GPL-2.0
/*
 * (C) Copyright 2018 Rockchip Electronics Co., Ltd.
 */

#include <ram.h>
#include <asm/io.h>
#include <asm/arch-rockchip/sdram.h>
#include <asm/arch-rockchip/sdram_pctl_px30.h>
#include <linux/delay.h>

/*
 * Issue a mode register read to the given rank and wait for completion.
 * rank = 1: cs0
 * rank = 2: cs1
 */
void pctl_read_mr(void __iomem *pctl_base, u32 rank, u32 mr_num)
{
	writel((rank << 4) | (1 << 0), pctl_base + DDR_PCTL2_MRCTRL0);
	writel((mr_num << 8), pctl_base + DDR_PCTL2_MRCTRL1);
	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & PCTL2_MR_WR_BUSY)
		continue;
}

/*
 * rank = 1: cs0
 * rank = 2: cs1
 * rank = 3: cs0 & cs1
 * note: be careful to keep the original MR value
 */
int pctl_write_mr(void __iomem *pctl_base, u32 rank, u32 mr_num, u32 arg,
		  u32 dramtype)
{
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & PCTL2_MR_WR_BUSY)
		continue;
	if (dramtype == DDR3 || dramtype == DDR4) {
		writel((mr_num << 12) | (rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel(arg, pctl_base + DDR_PCTL2_MRCTRL1);
	} else {
		writel((rank << 4) | (0 << 0),
		       pctl_base + DDR_PCTL2_MRCTRL0);
		writel((mr_num << 8) | (arg & 0xff),
		       pctl_base + DDR_PCTL2_MRCTRL1);
	}

	setbits_le32(pctl_base + DDR_PCTL2_MRCTRL0, 1u << 31);
	while (readl(pctl_base + DDR_PCTL2_MRCTRL0) & (1u << 31))
		continue;
	while (readl(pctl_base + DDR_PCTL2_MRSTAT) & PCTL2_MR_WR_BUSY)
		continue;

	return 0;
}

/*
 * rank: 1: cs0, 2: cs1, 3: cs0 & cs1
 * vrefrate: VrefDQ in hundredths of a percent, e.g. 4500 = 45.00%
 *	     (valid range 4500..9200, DDR4 only)
 */
int pctl_write_vrefdq(void __iomem *pctl_base, u32 rank, u32 vrefrate,
		      u32 dramtype)
{
	u32 tccd_l, value;
	u32 dis_auto_zq = 0;

	if (dramtype != DDR4 || vrefrate < 4500 ||
	    vrefrate > 9200)
		return -1;

	tccd_l = (readl(pctl_base + DDR_PCTL2_DRAMTMG4) >> 16) & 0xf;
	tccd_l = (tccd_l - 4) << 10;

	if (vrefrate > 7500) {
		/* range 1 */
		value = ((vrefrate - 6000) / 65) | tccd_l;
	} else {
		/* range 2 */
		value = ((vrefrate - 4500) / 65) | tccd_l | (1 << 6);
	}

	dis_auto_zq = pctl_dis_zqcs_aref(pctl_base);

	/* enable vrefdq calibration */
	pctl_write_mr(pctl_base, rank, 6, value | (1 << 7), dramtype);
	udelay(1);	/* tvrefdqe */
	/* write vrefdq value */
	pctl_write_mr(pctl_base, rank, 6, value | (1 << 7), dramtype);
	udelay(1);	/* tvref_time */
	pctl_write_mr(pctl_base, rank, 6, value | (0 << 7), dramtype);
	udelay(1);	/* tvrefdqx */

	pctl_rest_zqcs_aref(pctl_base, dis_auto_zq);

	return 0;
}

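/*
 * Toggle bit 1 of RFSHCTL3 (refresh register update) so that the
 * controller latches any newly written refresh settings.
 */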
static int upctl2_update_ref_reg(void __iomem *pctl_base)
{
	u32 ret;

	ret = readl(pctl_base + DDR_PCTL2_RFSHCTL3) ^ (1 << 1);
	writel(ret, pctl_base + DDR_PCTL2_RFSHCTL3);

	return 0;
}

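/*
 * Disable automatic ZQ calibration (ZQCTL0[31]) and auto refresh
 * (RFSHCTL3[0]).
 *
 * Returns 1 if auto ZQ calibration was enabled on entry and had to be
 * disabled here; pass the value to pctl_rest_zqcs_aref() to restore
 * the previous state.
 */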
u32 pctl_dis_zqcs_aref(void __iomem *pctl_base)
{
	u32 dis_auto_zq = 0;

	/* disable zqcs */
	if (!(readl(pctl_base + DDR_PCTL2_ZQCTL0) &
	      (1ul << 31))) {
		dis_auto_zq = 1;
		setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);
	}

	/* disable auto refresh */
	setbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);

	upctl2_update_ref_reg(pctl_base);

	return dis_auto_zq;
}

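/*
 * Counterpart of pctl_dis_zqcs_aref(): re-enable auto refresh and, if
 * dis_auto_zq is set, automatic ZQ calibration.
 */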
void pctl_rest_zqcs_aref(void __iomem *pctl_base, u32 dis_auto_zq)
{
	/* restore zqcs */
	if (dis_auto_zq)
		clrbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);

	/* restore auto refresh */
	clrbits_le32(pctl_base + DDR_PCTL2_RFSHCTL3, 1);

	upctl2_update_ref_reg(pctl_base);
}

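/*
 * Patch the MSTR entry (register offset 0) in the pctl_regs table to match
 * the detected SDRAM: device bus width (dbw), active ranks and data bus
 * width (bw), before the table is written to the controller by pctl_cfg().
 */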
u32 pctl_remodify_sdram_params(struct ddr_pctl_regs *pctl_regs,
			       struct sdram_cap_info *cap_info,
			       u32 dram_type)
{
	u32 tmp = 0, tmp_adr = 0, i;

	for (i = 0; pctl_regs->pctl[i][0] != 0xFFFFFFFF; i++) {
		if (pctl_regs->pctl[i][0] == 0) {
			tmp = pctl_regs->pctl[i][1];	/* MSTR */
			tmp_adr = i;
		}
	}

	tmp &= ~((3ul << 30) | (3ul << 24) | (3ul << 12));

	switch (cap_info->dbw) {
	case 2:
		tmp |= (3ul << 30);
		break;
	case 1:
		tmp |= (2ul << 30);
		break;
	case 0:
	default:
		tmp |= (1ul << 30);
		break;
	}

	/*
	 * If MSTR.active_ranks = 1 for DDR3 or DDR4, the controller gates
	 * the memory clock when entering power down. Force active_ranks
	 * to 3 to work around this.
	 */
	if (cap_info->rank == 2 || dram_type == DDR3 ||
	    dram_type == DDR4)
		tmp |= 3 << 24;
	else
		tmp |= 1 << 24;

	tmp |= (2 - cap_info->bw) << 12;

	pctl_regs->pctl[tmp_adr][1] = tmp;

	return 0;
}

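/*
 * Program the controller from the register/value table in pctl_regs
 * (terminated by an offset of 0xFFFFFFFF), then set the self-refresh
 * (sr_idle) and power-down (pd_idle) idle timeouts in PWRTMG, the
 * hardware low-power idle period in HWLPCTL, and disable automatic
 * ZQ calibration.
 */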
int pctl_cfg(void __iomem *pctl_base, struct ddr_pctl_regs *pctl_regs,
	     u32 sr_idle, u32 pd_idle)
{
	u32 i;

	for (i = 0; pctl_regs->pctl[i][0] != 0xFFFFFFFF; i++) {
		writel(pctl_regs->pctl[i][1],
		       pctl_base + pctl_regs->pctl[i][0]);
	}
	clrsetbits_le32(pctl_base + DDR_PCTL2_PWRTMG,
			(0xff << 16) | 0x1f,
			((sr_idle & 0xff) << 16) | (pd_idle & 0x1f));

	clrsetbits_le32(pctl_base + DDR_PCTL2_HWLPCTL,
			0xfff << 16,
			5 << 16);
	/* disable zqcs */
	setbits_le32(pctl_base + DDR_PCTL2_ZQCTL0, 1u << 31);

	return 0;
}