// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2014 - 2015 Xilinx, Inc.
 * Michal Simek <michal.simek@xilinx.com>
 */

#include <common.h>
#include <time.h>
#include <asm/arch/hardware.h>
#include <asm/arch/sys_proto.h>
#include <asm/armv8/mmu.h>
#include <asm/io.h>
#include <zynqmp_firmware.h>
#include <asm/cache.h>

#define ZYNQMP_SILICON_VER_MASK		0xF000
#define ZYNQMP_SILICON_VER_SHIFT	12

DECLARE_GLOBAL_DATA_PTR;

/*
 * Number of filled static entries and also the first empty
 * slot in zynqmp_mem_map.
 */
#define ZYNQMP_MEM_MAP_USED	4

#if !defined(CONFIG_ZYNQMP_NO_DDR)
#define DRAM_BANKS	CONFIG_NR_DRAM_BANKS
#else
#define DRAM_BANKS	0
#endif

#if defined(CONFIG_DEFINE_TCM_OCM_MMAP)
#define TCM_MAP		1
#else
#define TCM_MAP		0
#endif

/* +1 is for the end-of-list entry, which must stay empty */
#define ZYNQMP_MEM_MAP_MAX (ZYNQMP_MEM_MAP_USED + DRAM_BANKS + TCM_MAP + 1)

static struct mm_region zynqmp_mem_map[ZYNQMP_MEM_MAP_MAX] = {
	{
		.virt = 0x80000000UL,
		.phys = 0x80000000UL,
		.size = 0x70000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0xf8000000UL,
		.phys = 0xf8000000UL,
		.size = 0x07e00000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0x400000000UL,
		.phys = 0x400000000UL,
		.size = 0x400000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		.virt = 0x1000000000UL,
		.phys = 0x1000000000UL,
		.size = 0xf000000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}
};

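/*
 * Append the optional TCM/OCM window and the DDR banks reported in
 * gd->bd->bi_dram[] to the static device regions above, so it must run
 * after the DRAM bank information has been populated.
 */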
void mem_map_fill(void)
{
	int banks = ZYNQMP_MEM_MAP_USED;

#if defined(CONFIG_DEFINE_TCM_OCM_MMAP)
	zynqmp_mem_map[banks].virt = 0xffe00000UL;
	zynqmp_mem_map[banks].phys = 0xffe00000UL;
	zynqmp_mem_map[banks].size = 0x00200000UL;
	zynqmp_mem_map[banks].attrs = PTE_BLOCK_MEMTYPE(MT_NORMAL) |
				      PTE_BLOCK_INNER_SHARE;
	banks = banks + 1;
#endif

#if !defined(CONFIG_ZYNQMP_NO_DDR)
	for (int i = 0; i < CONFIG_NR_DRAM_BANKS; i++) {
		/* Zero size means there is no more DDR, so this is the end */
		if (!gd->bd->bi_dram[i].size)
			break;

		zynqmp_mem_map[banks].virt = gd->bd->bi_dram[i].start;
		zynqmp_mem_map[banks].phys = gd->bd->bi_dram[i].start;
		zynqmp_mem_map[banks].size = gd->bd->bi_dram[i].size;
		zynqmp_mem_map[banks].attrs = PTE_BLOCK_MEMTYPE(MT_NORMAL) |
					      PTE_BLOCK_INNER_SHARE;
		banks = banks + 1;
	}
#endif
}

struct mm_region *mem_map = zynqmp_mem_map;

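/*
 * Fixed amount of memory reserved for the MMU translation tables on this
 * SoC, overriding the generic armv8 size estimate.
 */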
u64 get_page_table_size(void)
{
	return 0x14000;
}

#if defined(CONFIG_SYS_MEM_RSVD_FOR_MMU) || defined(CONFIG_DEFINE_TCM_OCM_MMAP)
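/*
 * Configure the TCM in the requested mode and zero it out; any existing
 * TCM content is lost, hence the warning.
 */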
void tcm_init(u8 mode)
{
	puts("WARNING: Initializing TCM overwrites TCM content\n");
	initialize_tcm(mode);
	memset((void *)ZYNQMP_TCM_BASE_ADDR, 0, ZYNQMP_TCM_SIZE);
}
#endif

#ifdef CONFIG_SYS_MEM_RSVD_FOR_MMU
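/*
 * Lock the TCM and place the MMU translation tables there instead of in
 * DRAM, for configurations that reserve memory for the MMU.
 */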
int arm_reserve_mmu(void)
{
	tcm_init(TCM_LOCK);
	gd->arch.tlb_size = PGTABLE_SIZE;
	gd->arch.tlb_addr = ZYNQMP_TCM_BASE_ADDR;

	return 0;
}
#endif

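/* Read the silicon version from the CSU version register (EL3 only) */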
static unsigned int zynqmp_get_silicon_version_secure(void)
{
	u32 ver;

	ver = readl(&csu_base->version);
	ver &= ZYNQMP_SILICON_VER_MASK;
	ver >>= ZYNQMP_SILICON_VER_SHIFT;

	return ver;
}

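/*
 * Detect the platform type: at EL3 read the CSU register directly,
 * otherwise infer it from the timebase clock (50 MHz means QEMU).
 */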
unsigned int zynqmp_get_silicon_version(void)
{
	if (current_el() == 3)
		return zynqmp_get_silicon_version_secure();

	gd->cpu_clk = get_tbclk();

	switch (gd->cpu_clk) {
	case 50000000:
		return ZYNQMP_CSU_VERSION_QEMU;
	}

	return ZYNQMP_CSU_VERSION_SILICON;
}

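/* Masked read-modify-write of a register, only used at EL3 or in SPL */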
static int zynqmp_mmio_rawwrite(const u32 address,
				const u32 mask,
				const u32 value)
{
	u32 data;
	u32 value_local = value;
	int ret;

	ret = zynqmp_mmio_read(address, &data);
	if (ret)
		return ret;

	data &= ~mask;
	value_local &= mask;
	value_local |= data;
	writel(value_local, (ulong)address);
	return 0;
}

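/* Direct register read, only used at EL3 or in SPL */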
static int zynqmp_mmio_rawread(const u32 address, u32 *value)
{
	*value = readl((ulong)address);
	return 0;
}

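/*
 * Masked MMIO write: direct access at EL3 or in SPL, otherwise routed
 * through the firmware via the PM_MMIO_WRITE call.
 */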
int zynqmp_mmio_write(const u32 address,
		      const u32 mask,
		      const u32 value)
{
	if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3)
		return zynqmp_mmio_rawwrite(address, mask, value);
#if defined(CONFIG_ZYNQMP_FIRMWARE)
	else
		return xilinx_pm_request(PM_MMIO_WRITE, address, mask,
					 value, 0, NULL);
#endif

	return -EINVAL;
}

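/*
 * MMIO read: direct access at EL3 or in SPL, otherwise routed through
 * the firmware via the PM_MMIO_READ call.
 */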
int zynqmp_mmio_read(const u32 address, u32 *value)
{
	int ret = -EINVAL;

	if (!value)
		return ret;

	if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3) {
		ret = zynqmp_mmio_rawread(address, value);
	}
#if defined(CONFIG_ZYNQMP_FIRMWARE)
	else {
		u32 ret_payload[PAYLOAD_ARG_CNT];

		ret = xilinx_pm_request(PM_MMIO_READ, address, 0, 0,
					0, ret_payload);
		*value = ret_payload[1];
	}
#endif

	return ret;
}