/*
 * Copyright (C) 2018 Marvell International Ltd.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 * https://spdx.org/licenses
 */

#include <arch.h>
#include <arch_helpers.h>
#include <assert.h>
#include <debug.h>
#include <mmio.h>
#include <plat_marvell.h>
#include <platform_def.h>
#include <xlat_tables.h>


/* Weak definitions may be overridden in a specific Marvell platform */
#pragma weak plat_get_ns_image_entrypoint
#pragma weak plat_marvell_get_mmap

/*
 * Set up the page tables for the generic and platform-specific memory regions.
 * The extents of the generic memory regions are specified by the function
 * arguments and consist of:
 * - Trusted SRAM seen by the BL image;
 * - Code section;
 * - Read-only data section;
 * - Coherent memory region, if applicable.
 *
 * A usage sketch (with assumed linker symbols) follows the function body.
 */
void marvell_setup_page_tables(uintptr_t total_base,
                               size_t total_size,
                               uintptr_t code_start,
                               uintptr_t code_limit,
                               uintptr_t rodata_start,
                               uintptr_t rodata_limit
#if USE_COHERENT_MEM
                               ,
                               uintptr_t coh_start,
                               uintptr_t coh_limit
#endif
                               )
{
        /*
         * Map the Trusted SRAM with appropriate memory attributes.
         * Subsequent mappings will adjust the attributes for specific regions.
         */
        VERBOSE("Trusted SRAM seen by this BL image: %p - %p\n",
                (void *) total_base, (void *) (total_base + total_size));
        mmap_add_region(total_base, total_base,
                        total_size,
                        MT_MEMORY | MT_RW | MT_SECURE);

        /* Re-map the code section */
        VERBOSE("Code region: %p - %p\n",
                (void *) code_start, (void *) code_limit);
        mmap_add_region(code_start, code_start,
                        code_limit - code_start,
                        MT_CODE | MT_SECURE);

        /* Re-map the read-only data section */
        VERBOSE("Read-only data region: %p - %p\n",
                (void *) rodata_start, (void *) rodata_limit);
        mmap_add_region(rodata_start, rodata_start,
                        rodata_limit - rodata_start,
                        MT_RO_DATA | MT_SECURE);

#if USE_COHERENT_MEM
        /* Re-map the coherent memory region */
        VERBOSE("Coherent region: %p - %p\n",
                (void *) coh_start, (void *) coh_limit);
        mmap_add_region(coh_start, coh_start,
                        coh_limit - coh_start,
                        MT_DEVICE | MT_RW | MT_SECURE);
#endif

        /* Now (re-)map the platform-specific memory regions */
        mmap_add(plat_marvell_get_mmap());

        /* Create the page tables to reflect the above mappings */
        init_xlat_tables();
}
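
/*
 * A minimal usage sketch, assuming the standard TF-A linker-script symbols
 * (BL_CODE_BASE/BL_CODE_END, BL_RO_DATA_BASE/BL_RO_DATA_END and, when
 * USE_COHERENT_MEM is set, BL_COHERENT_RAM_BASE/BL_COHERENT_RAM_END) and a
 * BL31-sized image window. A platform's bl31_plat_arch_setup() could call:
 *
 *      marvell_setup_page_tables(BL31_BASE,
 *                                BL31_END - BL31_BASE,
 *                                BL_CODE_BASE,
 *                                BL_CODE_END,
 *                                BL_RO_DATA_BASE,
 *                                BL_RO_DATA_END
 * #if USE_COHERENT_MEM
 *                                ,
 *                                BL_COHERENT_RAM_BASE,
 *                                BL_COHERENT_RAM_END
 * #endif
 *                                );
 *      enable_mmu_el3(0);
 */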

unsigned long plat_get_ns_image_entrypoint(void)
{
        return PLAT_MARVELL_NS_IMAGE_OFFSET;
}
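
/*
 * Because of the weak pragma above, a platform port may provide its own
 * plat_get_ns_image_entrypoint(). A sketch of such an override, assuming a
 * hypothetical platform define PLAT_CUSTOM_NS_IMAGE_BASE:
 *
 *      unsigned long plat_get_ns_image_entrypoint(void)
 *      {
 *              return PLAT_CUSTOM_NS_IMAGE_BASE;
 *      }
 */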

/*****************************************************************************
 * Gets SPSR for BL32 entry
 *****************************************************************************
 */
uint32_t marvell_get_spsr_for_bl32_entry(void)
{
        /*
         * The Secure Payload Dispatcher service is responsible for
         * setting the SPSR prior to entry into the BL32 image.
         */
        return 0;
}

/*****************************************************************************
 * Gets SPSR for BL33 entry
 *****************************************************************************
 */
uint32_t marvell_get_spsr_for_bl33_entry(void)
{
        unsigned long el_status;
        unsigned int mode;
        uint32_t spsr;

        /* Figure out what mode we enter the non-secure world in */
        el_status = read_id_aa64pfr0_el1() >> ID_AA64PFR0_EL2_SHIFT;
        el_status &= ID_AA64PFR0_ELX_MASK;

        mode = (el_status) ? MODE_EL2 : MODE_EL1;

        /*
         * TODO: Consider the possibility of specifying the SPSR in
         * the FIP ToC and allowing the platform to have a say as
         * well.
         */
        spsr = SPSR_64(mode, MODE_SP_ELX, DISABLE_ALL_EXCEPTIONS);
        return spsr;
}
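
/*
 * Worked examples of the value returned above, assuming the standard arch.h
 * encoding of SPSR_64() (AArch64 state, SP_ELx, all of D/A/I/F masked):
 *
 *      EL2 present: SPSR_64(MODE_EL2, MODE_SP_ELX, DISABLE_ALL_EXCEPTIONS)
 *                   == 0x3C9 (EL2h, exceptions masked)
 *      EL1 only:    SPSR_64(MODE_EL1, MODE_SP_ELX, DISABLE_ALL_EXCEPTIONS)
 *                   == 0x3C5 (EL1h, exceptions masked)
 */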

/*****************************************************************************
 * Returns Marvell platform-specific memory map regions.
 *****************************************************************************
 */
const mmap_region_t *plat_marvell_get_mmap(void)
{
        return plat_marvell_mmap;
}
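
/*
 * A sketch of the platform-provided plat_marvell_mmap table this accessor
 * returns, assuming hypothetical PLAT_MARVELL_* base/size macros; the real
 * table lives in the platform port and must end with an empty entry:
 *
 *      const mmap_region_t plat_marvell_mmap[] = {
 *              MAP_REGION_FLAT(PLAT_MARVELL_DEVICE_BASE,
 *                              PLAT_MARVELL_DEVICE_SIZE,
 *                              MT_DEVICE | MT_RW | MT_SECURE),
 *              MAP_REGION_FLAT(PLAT_MARVELL_DRAM_BASE,
 *                              PLAT_MARVELL_DRAM_SIZE,
 *                              MT_MEMORY | MT_RW | MT_NS),
 *              {0}
 *      };
 */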