// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2012-2015 Panasonic Corporation
 * Copyright (C) 2015-2017 Socionext Inc.
 *   Author: Masahiro Yamada <yamada.masahiro@socionext.com>
 */

#include <init.h>
#include <linux/errno.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/sizes.h>
#include <asm/global_data.h>
#include <asm/u-boot.h>

#include "init.h"
#include "sg-regs.h"
#include "soc-info.h"

DECLARE_GLOBAL_DATA_PTR;

struct uniphier_dram_map {
	unsigned long base;
	unsigned long size;
};

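/*
 * Decode the SG_MEMCONF register into a per-channel DRAM map.  Channel 0
 * always starts at 0x80000000 and channel 1 follows it, unless sparse
 * memory mode moves channel 1 up to sparse_ch1_base.  Channel 2 is decoded
 * only when have_ch2 is true and the channel is not disabled.
 */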
static int uniphier_memconf_decode(struct uniphier_dram_map *dram_map,
				   unsigned long sparse_ch1_base, bool have_ch2)
{
	unsigned long size;
	u32 val;

	val = readl(sg_base + SG_MEMCONF);

	/* set up ch0 */
	dram_map[0].base = 0x80000000;

	switch (val & SG_MEMCONF_CH0_SZ_MASK) {
	case SG_MEMCONF_CH0_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH0_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH0_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH0_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH0_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch0 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH0_NUM_MASK) == SG_MEMCONF_CH0_NUM_2)
		size *= 2;

	dram_map[0].size = size;

	/* set up ch1 */
	dram_map[1].base = dram_map[0].base + size;

	if (val & SG_MEMCONF_SPARSEMEM) {
		if (dram_map[1].base > sparse_ch1_base) {
			pr_warn("Sparse mem is enabled, but ch0 and ch1 overlap\n");
			pr_warn("Only ch0 is available\n");
			dram_map[1].base = 0;
			return 0;
		}

		dram_map[1].base = sparse_ch1_base;
	}

	switch (val & SG_MEMCONF_CH1_SZ_MASK) {
	case SG_MEMCONF_CH1_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH1_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH1_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH1_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH1_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch1 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH1_NUM_MASK) == SG_MEMCONF_CH1_NUM_2)
		size *= 2;

	dram_map[1].size = size;

	if (!have_ch2 || val & SG_MEMCONF_CH2_DISABLE)
		return 0;

	/* set up ch2 */
	dram_map[2].base = dram_map[1].base + size;

	switch (val & SG_MEMCONF_CH2_SZ_MASK) {
	case SG_MEMCONF_CH2_SZ_64M:
		size = SZ_64M;
		break;
	case SG_MEMCONF_CH2_SZ_128M:
		size = SZ_128M;
		break;
	case SG_MEMCONF_CH2_SZ_256M:
		size = SZ_256M;
		break;
	case SG_MEMCONF_CH2_SZ_512M:
		size = SZ_512M;
		break;
	case SG_MEMCONF_CH2_SZ_1G:
		size = SZ_1G;
		break;
	default:
		pr_err("error: invalid value is set to MEMCONF ch2 size\n");
		return -EINVAL;
	}

	if ((val & SG_MEMCONF_CH2_NUM_MASK) == SG_MEMCONF_CH2_NUM_2)
		size *= 2;

	dram_map[2].size = size;

	return 0;
}

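/*
 * Per-SoC wrappers: they differ only in the sparse channel 1 base address
 * and in whether a third channel can exist.
 */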
static int uniphier_ld4_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xc0000000, false);
}

static int uniphier_pro4_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xa0000000, false);
}

static int uniphier_pxs2_dram_map_get(struct uniphier_dram_map dram_map[])
{
	return uniphier_memconf_decode(dram_map, 0xc0000000, true);
}

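/* Per-SoC data used to pick the matching dram_map_get() callback. */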
struct uniphier_dram_init_data {
	unsigned int soc_id;
	int (*dram_map_get)(struct uniphier_dram_map dram_map[]);
};

static const struct uniphier_dram_init_data uniphier_dram_init_data[] = {
	{
		.soc_id = UNIPHIER_LD4_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PRO4_ID,
		.dram_map_get = uniphier_pro4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_SLD8_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PRO5_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PXS2_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD6B_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD11_ID,
		.dram_map_get = uniphier_ld4_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_LD20_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
	{
		.soc_id = UNIPHIER_PXS3_ID,
		.dram_map_get = uniphier_pxs2_dram_map_get,
	},
};
UNIPHIER_DEFINE_SOCDATA_FUNC(uniphier_get_dram_init_data,
			     uniphier_dram_init_data)

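/* Look up the decoder for the running SoC and fill in the DRAM map. */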
static int uniphier_dram_map_get(struct uniphier_dram_map *dram_map)
{
	const struct uniphier_dram_init_data *data;

	data = uniphier_get_dram_init_data();
	if (!data) {
		pr_err("unsupported SoC\n");
		return -ENOTSUPP;
	}

	return data->dram_map_get(dram_map);
}

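/*
 * Report the usable RAM size: the first contiguous chunk below the 4GB
 * boundary, which is where U-Boot relocates itself.
 */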
int dram_init(void)
{
	struct uniphier_dram_map dram_map[3] = {};
	bool valid_bank_found = false;
	unsigned long prev_top;
	int ret, i;

	gd->ram_size = 0;

	ret = uniphier_dram_map_get(dram_map);
	if (ret)
		return ret;

	for (i = 0; i < ARRAY_SIZE(dram_map); i++) {
		unsigned long max_size;

		if (!dram_map[i].size)
			continue;

		/*
		 * U-Boot relocates itself to the tail of the memory region,
		 * but it does not expect sparse memory.  We use the first
		 * contiguous chunk here.
		 */
		if (valid_bank_found && prev_top < dram_map[i].base)
			break;

		/*
		 * Do not use memory that exceeds the 32bit address range.
		 * U-Boot relocates itself to the end of the effectively
		 * available RAM.  This could be a problem for DMA engines
		 * that do not support 64bit addresses (SDMA of SDHCI,
		 * UniPhier AV-ether, etc.)
		 */
		if (dram_map[i].base >= 1ULL << 32)
			break;

		max_size = (1ULL << 32) - dram_map[i].base;

		gd->ram_size = min(dram_map[i].size, max_size);

		if (!valid_bank_found)
			gd->ram_base = dram_map[i].base;

		prev_top = dram_map[i].base + dram_map[i].size;
		valid_bank_found = true;
	}

	/*
	 * LD20 uses the last 64 bytes of each channel for dynamic
	 * DDR PHY training.
	 */
	if (uniphier_get_soc_id() == UNIPHIER_LD20_ID)
		gd->ram_size -= 64;

	return 0;
}

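/*
 * Export each decoded channel as a memory bank in gd->bd->bi_dram and map
 * the address range spanning all of them.
 */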
int dram_init_banksize(void)
{
	struct uniphier_dram_map dram_map[3] = {};
	unsigned long base, top;
	bool valid_bank_found = false;
	int ret, i;

	ret = uniphier_dram_map_get(dram_map);
	if (ret)
		return ret;

	for (i = 0; i < ARRAY_SIZE(dram_map); i++) {
		if (i < ARRAY_SIZE(gd->bd->bi_dram)) {
			gd->bd->bi_dram[i].start = dram_map[i].base;
			gd->bd->bi_dram[i].size = dram_map[i].size;
		}

		if (!dram_map[i].size)
			continue;

		if (!valid_bank_found)
			base = dram_map[i].base;
		top = dram_map[i].base + dram_map[i].size;
		valid_bank_found = true;
	}

	if (!valid_bank_found)
		return -EINVAL;

	/* map all the DRAM regions */
	uniphier_mem_map_init(base, top - base);

	return 0;
}