// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2014 - 2015 Xilinx, Inc.
 * Michal Simek <michal.simek@amd.com>
 */

#include <init.h>
#include <time.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>
#include <asm/armv8/mmu.h>
#include <asm/cache.h>
#include <asm/global_data.h>
#include <asm/io.h>
#include <zynqmp_firmware.h>
#include <dm/platdata.h>

#define ZYNQ_SILICON_VER_MASK	0xF000
#define ZYNQ_SILICON_VER_SHIFT	12

DECLARE_GLOBAL_DATA_PTR;

/*
 * Number of filled static entries and also the first empty
 * slot in zynqmp_mem_map.
 */
#define ZYNQMP_MEM_MAP_USED	4

#if !defined(CONFIG_ZYNQMP_NO_DDR)
#define DRAM_BANKS CONFIG_NR_DRAM_BANKS
#else
#define DRAM_BANKS 0
#endif

#if defined(CONFIG_DEFINE_TCM_OCM_MMAP)
#define TCM_MAP 1
#else
#define TCM_MAP 0
#endif

/* +1 is for the end-of-list entry, which must stay empty */
#define ZYNQMP_MEM_MAP_MAX (ZYNQMP_MEM_MAP_USED + DRAM_BANKS + TCM_MAP + 1)

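/*
 * Static part of the memory map: the entries below describe the non-DRAM
 * parts of the address space (peripheral, PL and PCIe windows) and map
 * them as non-shareable, execute-never device memory. DDR banks and,
 * optionally, the TCM window are appended at runtime by mem_map_fill().
 */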
static struct mm_region zynqmp_mem_map[ZYNQMP_MEM_MAP_MAX] = {
	{
		.virt = 0x80000000UL,
		.phys = 0x80000000UL,
		.size = 0x70000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0xf8000000UL,
		.phys = 0xf8000000UL,
		.size = 0x07e00000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0x400000000UL,
		.phys = 0x400000000UL,
		.size = 0x400000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0x1000000000UL,
		.phys = 0x1000000000UL,
		.size = 0xf000000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}
};

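/*
 * Fill in the runtime part of zynqmp_mem_map: optionally the TCM/OCM
 * window and one normal-memory entry for each DDR bank reported in
 * gd->bd->bi_dram[].
 */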
void mem_map_fill(void)
{
	int banks = ZYNQMP_MEM_MAP_USED;

#if defined(CONFIG_DEFINE_TCM_OCM_MMAP)
	zynqmp_mem_map[banks].virt = 0xffe00000UL;
	zynqmp_mem_map[banks].phys = 0xffe00000UL;
	zynqmp_mem_map[banks].size = 0x00200000UL;
	zynqmp_mem_map[banks].attrs = PTE_BLOCK_MEMTYPE(MT_NORMAL) |
				      PTE_BLOCK_INNER_SHARE;
	banks = banks + 1;
#endif

#if !defined(CONFIG_ZYNQMP_NO_DDR)
	for (int i = 0; i < CONFIG_NR_DRAM_BANKS; i++) {
		/* Zero size means there is no more DDR, so this is the end */
		if (!gd->bd->bi_dram[i].size)
			break;

		zynqmp_mem_map[banks].virt = gd->bd->bi_dram[i].start;
		zynqmp_mem_map[banks].phys = gd->bd->bi_dram[i].start;
		zynqmp_mem_map[banks].size = gd->bd->bi_dram[i].size;
		zynqmp_mem_map[banks].attrs = PTE_BLOCK_MEMTYPE(MT_NORMAL) |
					      PTE_BLOCK_INNER_SHARE;
		banks = banks + 1;
	}
#endif
}

struct mm_region *mem_map = zynqmp_mem_map;

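/*
 * Tell the generic ARMv8 MMU code how much space to reserve for the
 * translation tables describing the map above.
 */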
u64 get_page_table_size(void)
{
	return 0x14000;
}

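/*
 * Initialize the tightly coupled memories in the requested mode and
 * clear them; any data previously stored in the TCM is lost, hence
 * the warning below.
 */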
#if defined(CONFIG_SYS_MEM_RSVD_FOR_MMU) || defined(CONFIG_DEFINE_TCM_OCM_MMAP)
void tcm_init(u8 mode)
{
	puts("WARNING: Initializing TCM overwrites TCM content\n");
	initialize_tcm(mode);
	memset((void *)ZYNQMP_TCM_BASE_ADDR, 0, ZYNQMP_TCM_SIZE);
}
#endif

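/*
 * With CONFIG_SYS_MEM_RSVD_FOR_MMU the MMU translation tables are
 * placed in the TCM instead of main memory.
 */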
#ifdef CONFIG_SYS_MEM_RSVD_FOR_MMU
int arm_reserve_mmu(void)
{
	tcm_init(TCM_LOCK);
	gd->arch.tlb_size = PGTABLE_SIZE;
	gd->arch.tlb_addr = ZYNQMP_TCM_BASE_ADDR;

	return 0;
}
#endif

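/* Read the silicon version from the CSU version register (used when running at EL3) */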
static unsigned int zynqmp_get_silicon_version_secure(void)
{
	u32 ver;

	ver = readl(&csu_base->version);
	ver &= ZYNQMP_SILICON_VER_MASK;
	ver >>= ZYNQMP_SILICON_VER_SHIFT;

	return ver;
}

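/*
 * Platform detection: at EL3 the CSU register is read directly; at lower
 * exception levels the timebase frequency is used instead, where 50 MHz
 * identifies the QEMU model and everything else is treated as real silicon.
 */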
unsigned int zynqmp_get_silicon_version(void)
{
	if (current_el() == 3)
		return zynqmp_get_silicon_version_secure();

	gd->cpu_clk = get_tbclk();

	switch (gd->cpu_clk) {
	case 50000000:
		return ZYNQMP_CSU_VERSION_QEMU;
	}

	return ZYNQMP_CSU_VERSION_SILICON;
}

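/* Direct masked read-modify-write of an MMIO register; only bits set in mask are changed */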
static int zynqmp_mmio_rawwrite(const u32 address,
				const u32 mask,
				const u32 value)
{
	u32 data;
	u32 value_local = value;
	int ret;

	ret = zynqmp_mmio_read(address, &data);
	if (ret)
		return ret;

	data &= ~mask;
	value_local &= mask;
	value_local |= data;
	writel(value_local, (ulong)address);
	return 0;
}

static int zynqmp_mmio_rawread(const u32 address, u32 *value)
{
	*value = readl((ulong)address);
	return 0;
}

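/*
 * Masked MMIO write: in SPL or at EL3 the register is written directly,
 * otherwise the request is forwarded to the platform management firmware
 * via PM_MMIO_WRITE.
 */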
int zynqmp_mmio_write(const u32 address,
		      const u32 mask,
		      const u32 value)
{
	if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3)
		return zynqmp_mmio_rawwrite(address, mask, value);
#if defined(CONFIG_ZYNQMP_FIRMWARE)
	else
		return xilinx_pm_request(PM_MMIO_WRITE, address, mask,
					 value, 0, NULL);
#endif

	return -EINVAL;
}

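/*
 * MMIO read: in SPL or at EL3 the register is read directly, otherwise
 * the value is retrieved from the platform management firmware via
 * PM_MMIO_READ.
 */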
int zynqmp_mmio_read(const u32 address, u32 *value)
{
	u32 ret = -EINVAL;

	if (!value)
		return ret;

	if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3) {
		ret = zynqmp_mmio_rawread(address, value);
	}
#if defined(CONFIG_ZYNQMP_FIRMWARE)
	else {
		u32 ret_payload[PAYLOAD_ARG_CNT];

		ret = xilinx_pm_request(PM_MMIO_READ, address, 0, 0,
					0, ret_payload);
		*value = ret_payload[1];
	}
#endif

	return ret;
}

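/*
 * Instantiate the soc_xilinx_zynqmp driver even though it has no
 * devicetree node of its own, so SoC identification is available.
 */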
U_BOOT_DRVINFO(soc_xilinx_zynqmp) = {
	.name = "soc_xilinx_zynqmp",
};