/*
2 * Copyright (c) 2015, ARM Limited and Contributors. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions are met:
6 *
7 * Redistributions of source code must retain the above copyright notice, this
8 * list of conditions and the following disclaimer.
9 *
10 * Redistributions in binary form must reproduce the above copyright notice,
11 * this list of conditions and the following disclaimer in the documentation
12 * and/or other materials provided with the distribution.
13 *
14 * Neither the name of ARM nor the names of its contributors may be used
15 * to endorse or promote products derived from this software without specific
16 * prior written permission.
17 *
18 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
21 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
22 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
23 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
24 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
25 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
26 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
27 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
28 * POSSIBILITY OF SUCH DAMAGE.
29 */
30#include <arch.h>
31#include <arch_helpers.h>
32#include <cci.h>
33#include <mmio.h>
34#include <plat_arm.h>
35#include <xlat_tables.h>
36
37
/*
 * CCI slave interface indices for the two clusters. The actual index values
 * are platform-defined (see the platform's PLAT_ARM_CCI_CLUSTER*_SL_IFACE_IX
 * definitions); the array order fixes the cluster-to-interface mapping used
 * by cci_init()/arm_cci_init() below.
 */
static const int cci_map[] = {
	PLAT_ARM_CCI_CLUSTER0_SL_IFACE_IX,
	PLAT_ARM_CCI_CLUSTER1_SL_IFACE_IX
};
42
43/* Weak definitions may be overridden in specific ARM standard platform */
44#pragma weak plat_get_ns_image_entrypoint
45
46
/*******************************************************************************
 * Macro generating the code for the function setting up the pagetables as per
 * the platform memory map & initialize the mmu, for the given exception level.
 *
 * The generated function arm_configure_mmu_el<EL>():
 *  - maps [total_base, total_base + total_size) as secure read-write memory
 *    (the image's total RAM footprint);
 *  - maps [ro_start, ro_limit) as secure read-only memory (code + RO data);
 *  - with USE_COHERENT_MEM, additionally maps [coh_start, coh_limit) as
 *    secure read-write *device* memory, so the coherent region is never
 *    cached;
 *  - adds the platform's static plat_arm_mmap regions, builds the
 *    translation tables and enables the MMU at the given EL (flags = 0).
 *
 * All regions are identity-mapped (VA == PA).
 ******************************************************************************/
#if USE_COHERENT_MEM
/* Variant with an extra coherent-memory region mapped as device memory. */
#define DEFINE_CONFIGURE_MMU_EL(_el)				\
	void arm_configure_mmu_el##_el(unsigned long total_base,	\
				   unsigned long total_size,		\
				   unsigned long ro_start,		\
				   unsigned long ro_limit,		\
				   unsigned long coh_start,		\
				   unsigned long coh_limit)		\
	{								\
		mmap_add_region(total_base, total_base,			\
				total_size,				\
				MT_MEMORY | MT_RW | MT_SECURE);		\
		mmap_add_region(ro_start, ro_start,			\
				ro_limit - ro_start,			\
				MT_MEMORY | MT_RO | MT_SECURE);		\
		mmap_add_region(coh_start, coh_start,			\
				coh_limit - coh_start,			\
				MT_DEVICE | MT_RW | MT_SECURE);		\
		mmap_add(plat_arm_mmap);				\
		init_xlat_tables();					\
									\
		enable_mmu_el##_el(0);					\
	}
#else
/* Variant without a coherent-memory region (USE_COHERENT_MEM == 0). */
#define DEFINE_CONFIGURE_MMU_EL(_el)				\
	void arm_configure_mmu_el##_el(unsigned long total_base,	\
				   unsigned long total_size,		\
				   unsigned long ro_start,		\
				   unsigned long ro_limit)		\
	{								\
		mmap_add_region(total_base, total_base,			\
				total_size,				\
				MT_MEMORY | MT_RW | MT_SECURE);		\
		mmap_add_region(ro_start, ro_start,			\
				ro_limit - ro_start,			\
				MT_MEMORY | MT_RO | MT_SECURE);		\
		mmap_add(plat_arm_mmap);				\
		init_xlat_tables();					\
									\
		enable_mmu_el##_el(0);					\
	}
#endif
93
/*
 * Define EL1 and EL3 variants of the function initialising the MMU:
 * arm_configure_mmu_el1() and arm_configure_mmu_el3().
 */
DEFINE_CONFIGURE_MMU_EL(1)
DEFINE_CONFIGURE_MMU_EL(3)
97
98
99unsigned long plat_get_ns_image_entrypoint(void)
100{
101 return PLAT_ARM_NS_IMAGE_OFFSET;
102}
103
/*******************************************************************************
 * Gets SPSR for BL32 entry
 ******************************************************************************/
uint32_t arm_get_spsr_for_bl32_entry(void)
{
	/*
	 * No SPSR is computed here: the Secure Payload Dispatcher service
	 * sets up the SPSR itself before handing control to the BL3-2
	 * image, so a zero placeholder is returned.
	 */
	return 0;
}
115
116/*******************************************************************************
117 * Gets SPSR for BL33 entry
118 ******************************************************************************/
119uint32_t arm_get_spsr_for_bl33_entry(void)
120{
121 unsigned long el_status;
122 unsigned int mode;
123 uint32_t spsr;
124
125 /* Figure out what mode we enter the non-secure world in */
126 el_status = read_id_aa64pfr0_el1() >> ID_AA64PFR0_EL2_SHIFT;
127 el_status &= ID_AA64PFR0_ELX_MASK;
128
129 mode = (el_status) ? MODE_EL2 : MODE_EL1;
130
131 /*
132 * TODO: Consider the possibility of specifying the SPSR in
133 * the FIP ToC and allowing the platform to have a say as
134 * well.
135 */
136 spsr = SPSR_64(mode, MODE_SP_ELX, DISABLE_ALL_EXCEPTIONS);
137 return spsr;
138}
139
140
/*
 * Initialise the CCI driver with the platform's CCI base address and the
 * slave interface indices of the two clusters (cci_map above). Coherency
 * itself is enabled separately by the caller.
 */
void arm_cci_init(void)
{
	cci_init(PLAT_ARM_CCI_BASE, cci_map, ARRAY_SIZE(cci_map));
}