// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2012-2015 Panasonic Corporation
 * Copyright (C) 2015-2017 Socionext Inc.
 *   Author: Masahiro Yamada <yamada.masahiro@socionext.com>
 */

#include <common.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/sizes.h>
#include <asm/global_data.h>

#include "init.h"
#include "sg-regs.h"
#include "soc-info.h"

DECLARE_GLOBAL_DATA_PTR;

struct uniphier_dram_map {
	unsigned long base;
	unsigned long size;
};

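/*
 * Decode the SG_MEMCONF register into the base/size of each DRAM channel.
 * @sparse_ch1_base: ch1 base address used when sparse memory mode is enabled
 * @have_ch2: true if this SoC can have a third channel
 */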
static int uniphier_memconf_decode(struct uniphier_dram_map *dram_map,
				   unsigned long sparse_ch1_base, bool have_ch2)
{
	unsigned long size;
	u32 val;

	val = readl(sg_base + SG_MEMCONF);

	/* set up ch0 */
	dram_map[0].base = 0x80000000;

	switch (val & SG_MEMCONF_CH0_SZ_MASK) {
	case SG_MEMCONF_CH0_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH0_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH0_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH0_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH0_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch0 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH0_NUM_MASK) == SG_MEMCONF_CH0_NUM_2)
		size *= 2;

	dram_map[0].size = size;

	/* set up ch1 */
	dram_map[1].base = dram_map[0].base + size;

	if (val & SG_MEMCONF_SPARSEMEM) {
		if (dram_map[1].base > sparse_ch1_base) {
			pr_warn("Sparse mem is enabled, but ch0 and ch1 overlap\n");
			pr_warn("Only ch0 is available\n");
			dram_map[1].base = 0;
			return 0;
		}

		dram_map[1].base = sparse_ch1_base;
	}

	switch (val & SG_MEMCONF_CH1_SZ_MASK) {
	case SG_MEMCONF_CH1_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH1_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH1_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH1_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH1_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch1 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH1_NUM_MASK) == SG_MEMCONF_CH1_NUM_2)
		size *= 2;

	dram_map[1].size = size;

	if (!have_ch2 || val & SG_MEMCONF_CH2_DISABLE)
		return 0;

	/* set up ch2 */
	dram_map[2].base = dram_map[1].base + size;

	switch (val & SG_MEMCONF_CH2_SZ_MASK) {
	case SG_MEMCONF_CH2_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH2_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH2_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH2_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH2_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch2 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH2_NUM_MASK) == SG_MEMCONF_CH2_NUM_2)
		size *= 2;

	dram_map[2].size = size;

	return 0;
}

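/*
 * Per-SoC helpers: the ch1 base address used for sparse memory and the
 * presence of a third channel differ between SoC generations.
 */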
static int uniphier_ld4_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xc0000000, false);
}

static int uniphier_pro4_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xa0000000, false);
}

static int uniphier_pxs2_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xc0000000, true);
}

struct uniphier_dram_init_data {
	unsigned int soc_id;
	int (*dram_map_get)(struct uniphier_dram_map dram_map[]);
};

static const struct uniphier_dram_init_data uniphier_dram_init_data[] = {
	{
		.soc_id = UNIPHIER_LD4_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PRO4_ID,
		.dram_map_get = uniphier_pro4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_SLD8_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PRO5_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PXS2_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD6B_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD11_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD20_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PXS3_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
};
UNIPHIER_DEFINE_SOCDATA_FUNC(uniphier_get_dram_init_data,
			     uniphier_dram_init_data)

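/* Fill @dram_map using the decode helper registered for the running SoC. */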
static int uniphier_dram_map_get(struct uniphier_dram_map *dram_map)
{
	const struct uniphier_dram_init_data *data;

	data = uniphier_get_dram_init_data();
	if (!data) {
		pr_err("unsupported SoC\n");
		return -ENOTSUPP;
	}

	return data->dram_map_get(dram_map);
}

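/*
 * Report the usable RAM to U-Boot: gd->ram_base/ram_size cover only the
 * first contiguous chunk below the 32bit address limit, because U-Boot
 * relocates itself to the end of this region.
 */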
int dram_init(void)
{
	struct uniphier_dram_map dram_map[3] = {};
	bool valid_bank_found = false;
	unsigned long prev_top;
	int ret, i;

	gd->ram_size = 0;

	ret = uniphier_dram_map_get(dram_map);
	if (ret)
		return ret;

	for (i = 0; i < ARRAY_SIZE(dram_map); i++) {
		unsigned long max_size;

		if (!dram_map[i].size)
			continue;

		/*
		 * U-Boot relocates itself to the tail of the memory region,
		 * but it does not expect sparse memory.  We use the first
		 * contiguous chunk here.
		 */
		if (valid_bank_found && prev_top < dram_map[i].base)
			break;

		/*
		 * Do not use memory that exceeds 32bit address range.  U-Boot
		 * relocates itself to the end of the effectively available RAM.
		 * This could be a problem for DMA engines that do not support
		 * 64bit address (SDMA of SDHCI, UniPhier AV-ether, etc.)
		 */
		if (dram_map[i].base >= 1ULL << 32)
			break;

		max_size = (1ULL << 32) - dram_map[i].base;

		gd->ram_size = min(dram_map[i].size, max_size);

		if (!valid_bank_found)
			gd->ram_base = dram_map[i].base;

		prev_top = dram_map[i].base + dram_map[i].size;
		valid_bank_found = true;
	}

	/*
	 * LD20 uses the last 64 bytes of each channel for dynamic
	 * DDR PHY training
	 */
	if (uniphier_get_soc_id() == UNIPHIER_LD20_ID)
		gd->ram_size -= 64;

	return 0;
}

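/*
 * Populate gd->bd->bi_dram[] with the per-channel banks and set up the
 * memory map covering all the DRAM regions.
 */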
int dram_init_banksize(void)
{
	struct uniphier_dram_map dram_map[3] = {};
	unsigned long base, top;
	bool valid_bank_found = false;
	int ret, i;

	ret = uniphier_dram_map_get(dram_map);
	if (ret)
		return ret;

	for (i = 0; i < ARRAY_SIZE(dram_map); i++) {
		if (i < ARRAY_SIZE(gd->bd->bi_dram)) {
			gd->bd->bi_dram[i].start = dram_map[i].base;
			gd->bd->bi_dram[i].size = dram_map[i].size;
		}

		if (!dram_map[i].size)
			continue;

		if (!valid_bank_found)
			base = dram_map[i].base;
		top = dram_map[i].base + dram_map[i].size;
		valid_bank_found = true;
	}

	if (!valid_bank_found)
		return -EINVAL;

	/* map all the DRAM regions */
	uniphier_mem_map_init(base, top - base);

	return 0;
}