// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2014 - 2015 Xilinx, Inc.
 * Michal Simek <michal.simek@xilinx.com>
 */

#include <common.h>
#include <init.h>
#include <time.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>
#include <asm/armv8/mmu.h>
#include <asm/cache.h>
#include <asm/global_data.h>
#include <asm/io.h>
#include <zynqmp_firmware.h>
#include <dm/platdata.h>

#define ZYNQMP_SILICON_VER_MASK		0xF000
#define ZYNQMP_SILICON_VER_SHIFT	12

DECLARE_GLOBAL_DATA_PTR;

/*
 * Number of filled static entries and also the first empty
 * slot in zynqmp_mem_map.
 */
#define ZYNQMP_MEM_MAP_USED	4

#if !defined(CONFIG_ZYNQMP_NO_DDR)
#define DRAM_BANKS	CONFIG_NR_DRAM_BANKS
#else
#define DRAM_BANKS	0
#endif

#if defined(CONFIG_DEFINE_TCM_OCM_MMAP)
#define TCM_MAP	1
#else
#define TCM_MAP	0
#endif

/* +1 is end of list which needs to be empty */
#define ZYNQMP_MEM_MAP_MAX	(ZYNQMP_MEM_MAP_USED + DRAM_BANKS + TCM_MAP + 1)

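/*
 * Static part of the memory map: these entries describe the device
 * windows of the SoC (roughly the lower PL/peripheral space at
 * 0x80000000, the register space around 0xf8000000, and the high
 * PL/PCIe windows above 4 GiB). They are mapped as strongly-ordered
 * device memory (nGnRnE), non-shareable and non-executable (PXN/UXN).
 * Cacheable entries for DRAM and the TCM are appended at runtime by
 * mem_map_fill() below.
 */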
static struct mm_region zynqmp_mem_map[ZYNQMP_MEM_MAP_MAX] = {
	{
		.virt = 0x80000000UL,
		.phys = 0x80000000UL,
		.size = 0x70000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0xf8000000UL,
		.phys = 0xf8000000UL,
		.size = 0x07e00000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0x400000000UL,
		.phys = 0x400000000UL,
		.size = 0x400000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0x1000000000UL,
		.phys = 0x1000000000UL,
		.size = 0xf000000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}
};

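/*
 * Fill the remaining zynqmp_mem_map[] slots at runtime: one cacheable
 * entry for the TCM/OCM window (when CONFIG_DEFINE_TCM_OCM_MMAP is set)
 * and one per populated DRAM bank from gd->bd->bi_dram[]. This is only
 * meaningful after the DRAM bank information has been filled in
 * (typically from the board's dram_init code).
 */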
void mem_map_fill(void)
{
	int banks = ZYNQMP_MEM_MAP_USED;

#if defined(CONFIG_DEFINE_TCM_OCM_MMAP)
	zynqmp_mem_map[banks].virt = 0xffe00000UL;
	zynqmp_mem_map[banks].phys = 0xffe00000UL;
	zynqmp_mem_map[banks].size = 0x00200000UL;
	zynqmp_mem_map[banks].attrs = PTE_BLOCK_MEMTYPE(MT_NORMAL) |
				      PTE_BLOCK_INNER_SHARE;
	banks = banks + 1;
#endif

#if !defined(CONFIG_ZYNQMP_NO_DDR)
	for (int i = 0; i < CONFIG_NR_DRAM_BANKS; i++) {
		/* Zero size means no more DDR banks; this is the end of the list */
		if (!gd->bd->bi_dram[i].size)
			break;

		zynqmp_mem_map[banks].virt = gd->bd->bi_dram[i].start;
		zynqmp_mem_map[banks].phys = gd->bd->bi_dram[i].start;
		zynqmp_mem_map[banks].size = gd->bd->bi_dram[i].size;
		zynqmp_mem_map[banks].attrs = PTE_BLOCK_MEMTYPE(MT_NORMAL) |
					      PTE_BLOCK_INNER_SHARE;
		banks = banks + 1;
	}
#endif
}

struct mm_region *mem_map = zynqmp_mem_map;

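/*
 * Tell the common ARMv8 MMU code how much memory to reserve for the
 * translation tables; 0x14000 bytes is 80 KiB, which should be enough
 * for the regions described above, including the entries added by
 * mem_map_fill().
 */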
u64 get_page_table_size(void)
{
	return 0x14000;
}

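/*
 * Initialize the R5 tightly-coupled memory so it can be used, e.g. as the
 * location of the MMU tables below. 'mode' selects the TCM configuration
 * (lock-step vs. split); initializing it destroys any previous TCM
 * content, hence the warning.
 */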
#if defined(CONFIG_SYS_MEM_RSVD_FOR_MMU) || defined(CONFIG_DEFINE_TCM_OCM_MMAP)
void tcm_init(u8 mode)
{
	puts("WARNING: Initializing TCM overwrites TCM content\n");
	initialize_tcm(mode);
	memset((void *)ZYNQMP_TCM_BASE_ADDR, 0, ZYNQMP_TCM_SIZE);
}
#endif

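/*
 * With CONFIG_SYS_MEM_RSVD_FOR_MMU the MMU tables are kept out of DRAM:
 * lock the TCM, then point the generic ARM reservation code at the TCM
 * base as the page table location.
 */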
#ifdef CONFIG_SYS_MEM_RSVD_FOR_MMU
int arm_reserve_mmu(void)
{
	tcm_init(TCM_LOCK);
	gd->arch.tlb_size = PGTABLE_SIZE;
	gd->arch.tlb_addr = ZYNQMP_TCM_BASE_ADDR;

	return 0;
}
#endif

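/*
 * At EL3 the CSU registers are directly accessible, so the silicon
 * version can be read straight out of the CSU version register.
 */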
static unsigned int zynqmp_get_silicon_version_secure(void)
{
	u32 ver;

	ver = readl(&csu_base->version);
	ver &= ZYNQMP_SILICON_VER_MASK;
	ver >>= ZYNQMP_SILICON_VER_SHIFT;

	return ver;
}

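/*
 * Below EL3 the CSU version register cannot be read directly, so fall
 * back to a heuristic: QEMU models the system counter at 50 MHz, while
 * real silicon runs the timebase at a different frequency, so the
 * counter clock is enough to tell the two apart.
 */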
unsigned int zynqmp_get_silicon_version(void)
{
	if (current_el() == 3)
		return zynqmp_get_silicon_version_secure();

	gd->cpu_clk = get_tbclk();

	switch (gd->cpu_clk) {
	case 50000000:
		return ZYNQMP_CSU_VERSION_QEMU;
	}

	return ZYNQMP_CSU_VERSION_SILICON;
}

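/*
 * Direct (non-firmware) register access: read-modify-write 'address' so
 * that only the bits selected by 'mask' take the new 'value'; all other
 * bits are preserved.
 */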
static int zynqmp_mmio_rawwrite(const u32 address,
				const u32 mask,
				const u32 value)
{
	u32 data;
	u32 value_local = value;
	int ret;

	ret = zynqmp_mmio_read(address, &data);
	if (ret)
		return ret;

	data &= ~mask;
	value_local &= mask;
	value_local |= data;
	writel(value_local, (ulong)address);
	return 0;
}

static int zynqmp_mmio_rawread(const u32 address, u32 *value)
{
	*value = readl((ulong)address);
	return 0;
}

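/*
 * Masked MMIO write. In SPL or at EL3 the register is written directly;
 * otherwise (with CONFIG_ZYNQMP_FIRMWARE) the request is forwarded to the
 * PMU firmware via PM_MMIO_WRITE, since the register may only be writable
 * from the secure world. Illustrative use (register address made up for
 * the example):
 *
 *	zynqmp_mmio_write(0xff5e0200, 0x1, 0x1);	// set only bit 0
 */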
int zynqmp_mmio_write(const u32 address,
		      const u32 mask,
		      const u32 value)
{
	if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3)
		return zynqmp_mmio_rawwrite(address, mask, value);
#if defined(CONFIG_ZYNQMP_FIRMWARE)
	else
		return xilinx_pm_request(PM_MMIO_WRITE, address, mask,
					 value, 0, NULL);
#endif

	return -EINVAL;
}

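/*
 * MMIO read counterpart: direct readl() in SPL or at EL3, PM_MMIO_READ
 * through the PMU firmware otherwise. Returns 0 on success and stores the
 * register contents in *value, e.g. (address made up for the example):
 *
 *	u32 val;
 *
 *	if (!zynqmp_mmio_read(0xff5e0200, &val))
 *		printf("reg: %x\n", val);
 */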
int zynqmp_mmio_read(const u32 address, u32 *value)
{
	int ret = -EINVAL;

	if (!value)
		return ret;

	if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3) {
		ret = zynqmp_mmio_rawread(address, value);
	}
#if defined(CONFIG_ZYNQMP_FIRMWARE)
	else {
		u32 ret_payload[PAYLOAD_ARG_CNT];

		ret = xilinx_pm_request(PM_MMIO_READ, address, 0, 0,
					0, ret_payload);
		*value = ret_payload[1];
	}
#endif

	return ret;
}

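/*
 * Bind the soc_xilinx_zynqmp device through driver model; no platform
 * data is passed beyond the driver name, the rest is probed by the SoC
 * driver itself.
 */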
U_BOOT_DRVINFO(soc_xilinx_zynqmp) = {
	.name = "soc_xilinx_zynqmp",
};