blob: 3eb8e8f44875176ab7ce43f3dcf9a5f7560909f1 [file] [log] [blame]
Tom Rini10e47792018-05-06 17:58:06 -04001// SPDX-License-Identifier: GPL-2.0+
Michal Simek04b7e622015-01-15 10:01:51 +01002/*
3 * (C) Copyright 2014 - 2015 Xilinx, Inc.
4 * Michal Simek <michal.simek@xilinx.com>
Michal Simek04b7e622015-01-15 10:01:51 +01005 */
6
7#include <common.h>
Simon Glass97589732020-05-10 11:40:02 -06008#include <init.h>
Simon Glassa9dc0682019-12-28 10:44:59 -07009#include <time.h>
Michal Simek04b7e622015-01-15 10:01:51 +010010#include <asm/arch/hardware.h>
11#include <asm/arch/sys_proto.h>
Alexander Graf0e2088c2016-03-04 01:09:49 +010012#include <asm/armv8/mmu.h>
Simon Glass274e0b02020-05-10 11:39:56 -060013#include <asm/cache.h>
Michal Simek04b7e622015-01-15 10:01:51 +010014#include <asm/io.h>
Ibai Erkiagac8a3efa2019-09-27 11:37:01 +010015#include <zynqmp_firmware.h>
Ovidiu Panait2b618472020-03-29 20:57:40 +030016#include <asm/cache.h>
Michal Simek04b7e622015-01-15 10:01:51 +010017
/*
 * Silicon version field layout (bits [15:12] of a version register).
 * NOTE(review): these ZYNQ_-prefixed macros appear unused in this file —
 * the code below reads ZYNQMP_SILICON_VER_MASK/_SHIFT (presumably from a
 * header); confirm whether these local duplicates can be removed or were
 * meant to carry the ZYNQMP_ prefix.
 */
#define ZYNQ_SILICON_VER_MASK	0xF000
#define ZYNQ_SILICON_VER_SHIFT	12
20
21DECLARE_GLOBAL_DATA_PTR;
22
/*
 * Number of filled static entries and also the first empty
 * slot in zynqmp_mem_map.
 */
#define ZYNQMP_MEM_MAP_USED	4

/* Slots reserved for runtime-probed DDR banks (none when DDR is disabled) */
#if !defined(CONFIG_ZYNQMP_NO_DDR)
#define DRAM_BANKS	CONFIG_NR_DRAM_BANKS
#else
#define DRAM_BANKS	0
#endif

/* One extra slot for the TCM/OCM region when that mapping is enabled */
#if defined(CONFIG_DEFINE_TCM_OCM_MMAP)
#define TCM_MAP	1
#else
#define TCM_MAP	0
#endif

/* +1 is end of list which needs to be empty */
#define ZYNQMP_MEM_MAP_MAX (ZYNQMP_MEM_MAP_USED + DRAM_BANKS + TCM_MAP + 1)
43
/*
 * Static part of the MMU region table: four windows mapped as strongly
 * ordered device memory (nGnRnE), non-shareable, never executable.
 * Runtime entries (TCM/OCM and DDR banks) are appended by mem_map_fill();
 * the array is sized so the final slot stays zeroed as the end-of-list
 * marker required by the ARMv8 MMU code.
 */
static struct mm_region zynqmp_mem_map[ZYNQMP_MEM_MAP_MAX] = {
	{
		/* 0x0_8000_0000 - 0x0_efff_ffff: device window */
		.virt = 0x80000000UL,
		.phys = 0x80000000UL,
		.size = 0x70000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		/* 0x0_f800_0000 - 0x0_ffdf_ffff: device window */
		.virt = 0xf8000000UL,
		.phys = 0xf8000000UL,
		.size = 0x07e00000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		/* 0x4_0000_0000 - 0x7_ffff_ffff: device window */
		.virt = 0x400000000UL,
		.phys = 0x400000000UL,
		.size = 0x400000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}, {
		/* 0x10_0000_0000 - 0xff_ffff_ffff: device window */
		.virt = 0x1000000000UL,
		.phys = 0x1000000000UL,
		.size = 0xf000000000UL,
		.attrs = PTE_BLOCK_MEMTYPE(MT_DEVICE_NGNRNE) |
			 PTE_BLOCK_NON_SHARE |
			 PTE_BLOCK_PXN | PTE_BLOCK_UXN
	}
};
Nitin Jain9bcc76f2018-04-20 12:30:40 +053075
76void mem_map_fill(void)
77{
78 int banks = ZYNQMP_MEM_MAP_USED;
79
80#if defined(CONFIG_DEFINE_TCM_OCM_MMAP)
81 zynqmp_mem_map[banks].virt = 0xffe00000UL;
82 zynqmp_mem_map[banks].phys = 0xffe00000UL;
83 zynqmp_mem_map[banks].size = 0x00200000UL;
84 zynqmp_mem_map[banks].attrs = PTE_BLOCK_MEMTYPE(MT_NORMAL) |
85 PTE_BLOCK_INNER_SHARE;
86 banks = banks + 1;
87#endif
88
89#if !defined(CONFIG_ZYNQMP_NO_DDR)
90 for (int i = 0; i < CONFIG_NR_DRAM_BANKS; i++) {
91 /* Zero size means no more DDR that's this is end */
92 if (!gd->bd->bi_dram[i].size)
93 break;
94
95 zynqmp_mem_map[banks].virt = gd->bd->bi_dram[i].start;
96 zynqmp_mem_map[banks].phys = gd->bd->bi_dram[i].start;
97 zynqmp_mem_map[banks].size = gd->bd->bi_dram[i].size;
98 zynqmp_mem_map[banks].attrs = PTE_BLOCK_MEMTYPE(MT_NORMAL) |
99 PTE_BLOCK_INNER_SHARE;
100 banks = banks + 1;
101 }
102#endif
103}
104
/* Region table consumed by the generic ARMv8 MMU setup code */
struct mm_region *mem_map = zynqmp_mem_map;
106
Michal Simek1a2d5e22016-05-30 10:41:26 +0200107u64 get_page_table_size(void)
108{
109 return 0x14000;
110}
111
#if defined(CONFIG_SYS_MEM_RSVD_FOR_MMU) || defined(CONFIG_DEFINE_TCM_OCM_MMAP)
/*
 * Initialize the Tightly Coupled Memory and wipe its contents.
 *
 * @mode: TCM configuration mode forwarded to initialize_tcm()
 *        (e.g. lock vs. split — exact semantics defined by the helper)
 *
 * Initialization destroys whatever was in TCM, hence the warning; the
 * region is then zeroed so later users start from a clean state.
 */
void tcm_init(u8 mode)
{
	puts("WARNING: Initializing TCM overwrites TCM content\n");
	initialize_tcm(mode);
	memset((void *)ZYNQMP_TCM_BASE_ADDR, 0, ZYNQMP_TCM_SIZE);
}
#endif
Siva Durga Prasad Paladugua1ad8782018-10-05 15:09:04 +0530120
#ifdef CONFIG_SYS_MEM_RSVD_FOR_MMU
/*
 * Place the MMU translation tables in TCM instead of DRAM.
 *
 * Locks and clears the TCM via tcm_init(), then points the page-table
 * allocator (gd->arch.tlb_addr/tlb_size) at the TCM base.
 *
 * Return: always 0.
 */
int arm_reserve_mmu(void)
{
	tcm_init(TCM_LOCK);
	gd->arch.tlb_size = PGTABLE_SIZE;
	gd->arch.tlb_addr = ZYNQMP_TCM_BASE_ADDR;

	return 0;
}
#endif
131
Michal Simekc23d3f82015-11-05 08:34:35 +0100132static unsigned int zynqmp_get_silicon_version_secure(void)
133{
134 u32 ver;
135
136 ver = readl(&csu_base->version);
137 ver &= ZYNQMP_SILICON_VER_MASK;
138 ver >>= ZYNQMP_SILICON_VER_SHIFT;
139
140 return ver;
141}
142
Michal Simek04b7e622015-01-15 10:01:51 +0100143unsigned int zynqmp_get_silicon_version(void)
144{
Michal Simekc23d3f82015-11-05 08:34:35 +0100145 if (current_el() == 3)
146 return zynqmp_get_silicon_version_secure();
147
Michal Simek04b7e622015-01-15 10:01:51 +0100148 gd->cpu_clk = get_tbclk();
149
150 switch (gd->cpu_clk) {
151 case 50000000:
152 return ZYNQMP_CSU_VERSION_QEMU;
153 }
154
Michal Simek8d2c02d2015-08-20 14:01:39 +0200155 return ZYNQMP_CSU_VERSION_SILICON;
Michal Simek04b7e622015-01-15 10:01:51 +0100156}
Siva Durga Prasad Paladugu0e39bd72017-02-02 01:10:46 +0530157
Siva Durga Prasad Paladugu668fdd42017-07-13 19:01:12 +0530158static int zynqmp_mmio_rawwrite(const u32 address,
Siva Durga Prasad Paladugu0e39bd72017-02-02 01:10:46 +0530159 const u32 mask,
160 const u32 value)
161{
162 u32 data;
163 u32 value_local = value;
Michal Simekfaac0ce2018-06-13 10:38:33 +0200164 int ret;
165
166 ret = zynqmp_mmio_read(address, &data);
167 if (ret)
168 return ret;
Siva Durga Prasad Paladugu0e39bd72017-02-02 01:10:46 +0530169
Siva Durga Prasad Paladugu0e39bd72017-02-02 01:10:46 +0530170 data &= ~mask;
171 value_local &= mask;
172 value_local |= data;
173 writel(value_local, (ulong)address);
174 return 0;
175}
176
/*
 * Direct (non-firmware) MMIO register read.
 * Stores the register contents through @value.
 *
 * Return: always 0 (kept for symmetry with the firmware-backed path).
 */
static int zynqmp_mmio_rawread(const u32 address, u32 *value)
{
	*value = readl((ulong)address);
	return 0;
}
Siva Durga Prasad Paladugu668fdd42017-07-13 19:01:12 +0530182
/*
 * Masked MMIO write usable at any exception level.
 *
 * In SPL or at EL3 the register is written directly; otherwise the
 * request is forwarded to the platform firmware via PM_MMIO_WRITE.
 *
 * Return: 0 on success, negative error code on failure; -EINVAL when
 * neither access method is available (EL < 3 without firmware support).
 */
int zynqmp_mmio_write(const u32 address,
		      const u32 mask,
		      const u32 value)
{
	if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3)
		return zynqmp_mmio_rawwrite(address, mask, value);
#if defined(CONFIG_ZYNQMP_FIRMWARE)
	else
		return xilinx_pm_request(PM_MMIO_WRITE, address, mask,
					 value, 0, NULL);
#endif

	return -EINVAL;
}
197
198int zynqmp_mmio_read(const u32 address, u32 *value)
199{
Michal Simek81efd2a2019-10-04 15:45:29 +0200200 u32 ret = -EINVAL;
Siva Durga Prasad Paladugu668fdd42017-07-13 19:01:12 +0530201
202 if (!value)
Michal Simek81efd2a2019-10-04 15:45:29 +0200203 return ret;
Siva Durga Prasad Paladugu668fdd42017-07-13 19:01:12 +0530204
205 if (IS_ENABLED(CONFIG_SPL_BUILD) || current_el() == 3) {
206 ret = zynqmp_mmio_rawread(address, value);
Michal Simek81efd2a2019-10-04 15:45:29 +0200207 }
208#if defined(CONFIG_ZYNQMP_FIRMWARE)
209 else {
210 u32 ret_payload[PAYLOAD_ARG_CNT];
211
Michal Simek4c3de372019-10-04 15:35:45 +0200212 ret = xilinx_pm_request(PM_MMIO_READ, address, 0, 0,
213 0, ret_payload);
Siva Durga Prasad Paladugu668fdd42017-07-13 19:01:12 +0530214 *value = ret_payload[1];
215 }
Michal Simek81efd2a2019-10-04 15:45:29 +0200216#endif
Siva Durga Prasad Paladugu668fdd42017-07-13 19:01:12 +0530217
218 return ret;
219}