/*
 * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <arch_helpers.h>
#include <assert.h>
#include <cassert.h>
#include <common_def.h>
#include <debug.h>
#include <platform_def.h>
#include <stdbool.h>
#include <stdint.h>
#include <string.h>
#include <utils.h>
#include <xlat_tables.h>
#include "xlat_tables_private.h"

#if LOG_LEVEL >= LOG_LEVEL_VERBOSE
#define LVL0_SPACER ""
#define LVL1_SPACER "  "
#define LVL2_SPACER "    "
#define LVL3_SPACER "      "
#define get_level_spacer(level)		\
			(((level) == U(0)) ? LVL0_SPACER :	\
			(((level) == U(1)) ? LVL1_SPACER :	\
			(((level) == U(2)) ? LVL2_SPACER : LVL3_SPACER)))
#define debug_print(...) printf(__VA_ARGS__)
#else
#define debug_print(...) ((void)0)
#endif

#define UNSET_DESC	~0ULL
#define MT_UNKNOWN	~0U

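/*
 * Statically allocated pool of translation tables. Entries are handed out by
 * init_xlation_table_inner() whenever an area needs a finer-grained table.
 */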
static uint64_t xlat_tables[MAX_XLAT_TABLES][XLAT_TABLE_ENTRIES]
			__aligned(XLAT_TABLE_SIZE) __section("xlat_table");

static unsigned int next_xlat;
static unsigned long long xlat_max_pa;
static uintptr_t xlat_max_va;

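/*
 * Descriptor bits that depend on the translation regime. They are computed
 * once in init_xlation_table() from the current exception level and then
 * applied by mmap_desc() when building descriptors.
 */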
static uint64_t execute_never_mask;
static uint64_t ap1_mask;

/*
 * Array of all memory regions stored in order of ascending base address.
 * The list is terminated by the first entry with size == 0.
 */
static mmap_region_t mmap[MAX_MMAP_REGIONS + 1];


void print_mmap(void)
{
#if LOG_LEVEL >= LOG_LEVEL_VERBOSE
	debug_print("mmap:\n");
	mmap_region_t *mm = mmap;
	while (mm->size != 0U) {
		debug_print(" VA:%p PA:0x%llx size:0x%zx attr:0x%x\n",
				(void *)mm->base_va, mm->base_pa,
				mm->size, mm->attr);
		++mm;
	}
	debug_print("\n");
#endif
}

void mmap_add_region(unsigned long long base_pa, uintptr_t base_va,
		     size_t size, unsigned int attr)
{
	mmap_region_t *mm = mmap;
	const mmap_region_t *mm_last = mm + ARRAY_SIZE(mmap) - 1U;
	unsigned long long end_pa = base_pa + size - 1U;
	uintptr_t end_va = base_va + size - 1U;

	assert(IS_PAGE_ALIGNED(base_pa));
	assert(IS_PAGE_ALIGNED(base_va));
	assert(IS_PAGE_ALIGNED(size));

	if (size == 0U)
		return;

	assert(base_pa < end_pa); /* Check for overflows */
	assert(base_va < end_va);

	assert((base_va + (uintptr_t)size - (uintptr_t)1) <=
					(PLAT_VIRT_ADDR_SPACE_SIZE - 1U));
	assert((base_pa + (unsigned long long)size - 1ULL) <=
					(PLAT_PHY_ADDR_SPACE_SIZE - 1U));

#if ENABLE_ASSERTIONS

	/* Check for PAs and VAs overlaps with all other regions */
	for (mm = mmap; mm->size; ++mm) {

		uintptr_t mm_end_va = mm->base_va + mm->size - 1U;

		/*
		 * Check if one of the regions is completely inside the other
		 * one.
		 */
		bool fully_overlapped_va =
			((base_va >= mm->base_va) && (end_va <= mm_end_va)) ||
			((mm->base_va >= base_va) && (mm_end_va <= end_va));

		/*
		 * Full VA overlaps are only allowed if both regions are
		 * identity mapped (zero offset) or have the same VA to PA
		 * offset. Also, make sure that it's not the exact same area.
		 */
		if (fully_overlapped_va) {
			assert((mm->base_va - mm->base_pa) ==
			       (base_va - base_pa));
			assert((base_va != mm->base_va) || (size != mm->size));
		} else {
			/*
			 * If the regions do not have fully overlapping VAs,
			 * then they must have fully separated VAs and PAs.
			 * Partial overlaps are not allowed.
			 */

			unsigned long long mm_end_pa =
						mm->base_pa + mm->size - 1;

			bool separated_pa = (end_pa < mm->base_pa) ||
				(base_pa > mm_end_pa);
			bool separated_va = (end_va < mm->base_va) ||
				(base_va > mm_end_va);

			assert(separated_va && separated_pa);
		}
	}

	mm = mmap; /* Restore pointer to the start of the array */

#endif /* ENABLE_ASSERTIONS */

	/* Find correct place in mmap to insert new region */
	while ((mm->base_va < base_va) && (mm->size != 0U))
		++mm;

	/*
	 * If a section is contained inside another one with the same base
	 * address, it must be placed after the one it is contained in:
	 *
	 * 1st |-----------------------|
	 * 2nd |------------|
	 * 3rd |------|
	 *
	 * This is required for mmap_region_attr() to get the attributes of the
	 * small region correctly.
	 */
	while ((mm->base_va == base_va) && (mm->size > size))
		++mm;

	/* Make room for new region by moving other regions up by one place */
	(void)memmove(mm + 1, mm, (uintptr_t)mm_last - (uintptr_t)mm);

	/* Check we haven't lost the empty sentinel from the end of the array */
	assert(mm_last->size == 0U);

	mm->base_pa = base_pa;
	mm->base_va = base_va;
	mm->size = size;
	mm->attr = attr;

	if (end_pa > xlat_max_pa)
		xlat_max_pa = end_pa;
	if (end_va > xlat_max_va)
		xlat_max_va = end_va;
}

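/*
 * Add every entry of an array of regions terminated by one with size == 0.
 *
 * Illustrative usage only; the region names, sizes and attributes below are
 * made-up examples, not part of this library:
 *
 *	static const mmap_region_t plat_mmap[] = {
 *		MAP_REGION_FLAT(DEVICE0_BASE, DEVICE0_SIZE,
 *				MT_DEVICE | MT_RW | MT_SECURE),
 *		MAP_REGION_FLAT(SRAM_BASE, SRAM_SIZE,
 *				MT_MEMORY | MT_RW | MT_SECURE),
 *		{0}
 *	};
 *
 *	mmap_add(plat_mmap);
 */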
void mmap_add(const mmap_region_t *mm)
{
	const mmap_region_t *mm_cursor = mm;

	while (mm_cursor->size != 0U) {
		mmap_add_region(mm_cursor->base_pa, mm_cursor->base_va,
				mm_cursor->size, mm_cursor->attr);
		mm_cursor++;
	}
}

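/*
 * Build the block/page descriptor (output address, lower attributes and
 * execute-never bits) for a physical address mapped with the given MT_*
 * attributes at the given lookup level.
 */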
static uint64_t mmap_desc(unsigned int attr, unsigned long long addr_pa,
			  unsigned int level)
{
	uint64_t desc;
	int mem_type;

	/* Make sure that the granularity is fine enough to map this address. */
	assert((addr_pa & XLAT_BLOCK_MASK(level)) == 0U);

	desc = addr_pa;
	/*
	 * There are different translation table descriptors for level 3 and
	 * the rest.
	 */
	desc |= (level == XLAT_TABLE_LEVEL_MAX) ? PAGE_DESC : BLOCK_DESC;
	desc |= ((attr & MT_NS) != 0U) ? LOWER_ATTRS(NS) : 0U;
	desc |= ((attr & MT_RW) != 0U) ? LOWER_ATTRS(AP_RW) : LOWER_ATTRS(AP_RO);
	/*
	 * Always set the access flag, as this library assumes access flag
	 * faults aren't managed.
	 */
	desc |= LOWER_ATTRS(ACCESS_FLAG);
	desc |= ap1_mask;

	/*
	 * Deduce shareability domain and executability of the memory region
	 * from the memory type.
	 *
	 * Data accesses to device memory and non-cacheable normal memory are
	 * coherent for all observers in the system, and correspondingly are
	 * always treated as being Outer Shareable. Therefore, for these 2
	 * types of memory, it is not strictly needed to set the shareability
	 * field in the translation tables.
	 */
	mem_type = MT_TYPE(attr);
	if (mem_type == MT_DEVICE) {
		desc |= LOWER_ATTRS(ATTR_DEVICE_INDEX | OSH);
		/*
		 * Always map device memory as execute-never.
		 * This is to avoid the possibility of a speculative instruction
		 * fetch, which could be an issue if this memory region
		 * corresponds to a read-sensitive peripheral.
		 */
		desc |= execute_never_mask;

	} else { /* Normal memory */
		/*
		 * Always map read-write normal memory as execute-never.
		 * This library assumes that it is used by software that does
		 * not self-modify its code, therefore R/W memory is reserved
		 * for data storage, which must not be executable.
		 *
		 * Note that setting the XN bit here is for consistency only.
		 * The function that enables the MMU sets the SCTLR_ELx.WXN bit,
		 * which causes any writable memory region to be treated as
		 * execute-never, regardless of the value of the XN bit in the
		 * translation table.
		 *
		 * For read-only memory, rely on the MT_EXECUTE/MT_EXECUTE_NEVER
		 * attribute to figure out the value of the XN bit.
		 */
		if (((attr & MT_RW) != 0U) || ((attr & MT_EXECUTE_NEVER) != 0U)) {
			desc |= execute_never_mask;
		}

		if (mem_type == MT_MEMORY) {
			desc |= LOWER_ATTRS(ATTR_IWBWA_OWBWA_NTR_INDEX | ISH);
		} else {
			assert(mem_type == MT_NON_CACHEABLE);
			desc |= LOWER_ATTRS(ATTR_NON_CACHEABLE_INDEX | OSH);
		}
	}

	debug_print((mem_type == MT_MEMORY) ? "MEM" :
		((mem_type == MT_NON_CACHEABLE) ? "NC" : "DEV"));
	debug_print(((attr & MT_RW) != 0U) ? "-RW" : "-RO");
	debug_print(((attr & MT_NS) != 0U) ? "-NS" : "-S");
	debug_print(((attr & MT_EXECUTE_NEVER) != 0U) ? "-XN" : "-EXEC");
	return desc;
}

/*
 * Look for the innermost region that contains the area at `base_va` with size
 * `size`. Populate *attr with the attributes of this region.
 *
 * On success, this function returns 0.
 * If there are partial overlaps (meaning that a smaller size is needed) or if
 * the region can't be found in the given area, it returns MT_UNKNOWN. In this
 * case the value pointed to by attr should be ignored by the caller.
 */
static unsigned int mmap_region_attr(const mmap_region_t *mm, uintptr_t base_va,
				     size_t size, unsigned int *attr)
{
	/* Don't assume that the area is contained in the first region */
	unsigned int ret = MT_UNKNOWN;

	/*
	 * Get attributes from last (innermost) region that contains the
	 * requested area. Don't stop as soon as one region doesn't contain it
	 * because there may be other internal regions that contain this area:
	 *
	 * |-----------------------------1-----------------------------|
	 * |----2----|     |-------3-------|    |----5----|
	 *                   |--4--|
	 *
	 *                   |---| <- Area we want the attributes of.
	 *
	 * In this example, the area is contained in regions 1, 3 and 4 but not
	 * in region 2. The loop shouldn't stop at region 2 as inner regions
	 * have priority over outer regions; it should stop at region 5.
	 */
	for ( ; ; ++mm) {

		if (mm->size == 0U)
			return ret; /* Reached end of list */

		if (mm->base_va > (base_va + size - 1U))
			return ret; /* Next region is after area so end */

		if ((mm->base_va + mm->size - 1U) < base_va)
			continue; /* Next region has already been overtaken */

		if ((ret == 0U) && (mm->attr == *attr))
			continue; /* Region doesn't override attribs so skip */

		if ((mm->base_va > base_va) ||
			((mm->base_va + mm->size - 1U) < (base_va + size - 1U)))
			return MT_UNKNOWN; /* Region doesn't fully cover area */

		*attr = mm->attr;
		ret = 0U;
	}
	return ret;
}

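/*
 * Fill one translation table for the VA range starting at `base_va`, using
 * the regions from `mm` onwards. Each entry is mapped with a block/page
 * descriptor if a single region covers it entirely with uniform attributes;
 * otherwise a new table is taken from xlat_tables[] and filled recursively
 * one level down. Returns the region cursor so the caller can continue from
 * where this table left off.
 */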
static mmap_region_t *init_xlation_table_inner(mmap_region_t *mm,
						uintptr_t base_va,
						uint64_t *table,
						unsigned int level)
{
	assert((level >= XLAT_TABLE_LEVEL_MIN) &&
	       (level <= XLAT_TABLE_LEVEL_MAX));

	unsigned int level_size_shift =
		       L0_XLAT_ADDRESS_SHIFT - level * XLAT_TABLE_ENTRIES_SHIFT;
	u_register_t level_size = (u_register_t)1 << level_size_shift;
	u_register_t level_index_mask =
		((u_register_t)XLAT_TABLE_ENTRIES_MASK) << level_size_shift;

	debug_print("New xlat table:\n");

	do {
		uint64_t desc = UNSET_DESC;

		if (mm->size == 0U) {
			/* Done mapping regions; finish zeroing the table */
			desc = INVALID_DESC;
		} else if ((mm->base_va + mm->size - 1U) < base_va) {
			/* This area is after the region so get next region */
			++mm;
			continue;
		}

		debug_print("%s VA:%p size:0x%llx ", get_level_spacer(level),
			(void *)base_va, (unsigned long long)level_size);

		if (mm->base_va > (base_va + level_size - 1U)) {
			/* Next region is after this area. Nothing to map yet */
			desc = INVALID_DESC;
		/* Make sure that the current level allows block descriptors */
		} else if (level >= XLAT_BLOCK_LEVEL_MIN) {
			/*
			 * Try to get attributes of this area. It will fail if
			 * there are partially overlapping regions. On success,
			 * it will return the innermost region's attributes.
			 */
			unsigned int attr;
			unsigned int r = mmap_region_attr(mm, base_va,
							  level_size, &attr);

			if (r == 0U) {
				desc = mmap_desc(attr,
					base_va - mm->base_va + mm->base_pa,
					level);
			}
		}

		if (desc == UNSET_DESC) {
			/* Area not covered by a region so need finer table */
			uint64_t *new_table = xlat_tables[next_xlat];

			next_xlat++;
			assert(next_xlat <= MAX_XLAT_TABLES);
			desc = TABLE_DESC | (uintptr_t)new_table;

			/* Recurse to fill in new table */
			mm = init_xlation_table_inner(mm, base_va,
						      new_table, level + 1U);
		}

		debug_print("\n");

		*table++ = desc;
		base_va += level_size;
	} while ((base_va & level_index_mask) &&
		 ((base_va - 1U) < (PLAT_VIRT_ADDR_SPACE_SIZE - 1U)));

	return mm;
}

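/*
 * Populate the base translation table from the regions registered so far and
 * report the highest mapped VA and PA. This is normally reached through the
 * architecture-specific wrapper rather than called by platforms directly;
 * an illustrative (assumed, not prescriptive) platform sequence using the
 * public API would be:
 *
 *	mmap_add(plat_mmap);
 *	init_xlat_tables();
 *	enable_mmu_el3(0);	(or enable_mmu_el1(0) when running at EL1)
 */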
void init_xlation_table(uintptr_t base_va, uint64_t *table,
			unsigned int level, uintptr_t *max_va,
			unsigned long long *max_pa)
{
	unsigned int el = xlat_arch_current_el();

	execute_never_mask = xlat_arch_get_xn_desc(el);

	if (el == 3U) {
		ap1_mask = LOWER_ATTRS(AP_ONE_VA_RANGE_RES1);
	} else {
		assert(el == 1U);
		ap1_mask = 0ULL;
	}

	init_xlation_table_inner(mmap, base_va, table, level);
	*max_va = xlat_max_va;
	*max_pa = xlat_max_pa;
}