/*
 * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __EL3_COMMON_MACROS_S__
#define __EL3_COMMON_MACROS_S__

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
37
38 /*
39 * Helper macro to initialise EL3 registers we care about.
40 */
41 .macro el3_arch_init_common _exception_vectors
42 /* ---------------------------------------------------------------------
43 * Enable the instruction cache and alignment checks
44 * ---------------------------------------------------------------------
45 */
46 ldr r1, =(SCTLR_RES1 | SCTLR_I_BIT | SCTLR_A_BIT)
47 ldcopr r0, SCTLR
48 orr r0, r0, r1
49 stcopr r0, SCTLR
50 isb
51
52 /* ---------------------------------------------------------------------
53 * Set the exception vectors (VBAR/MVBAR).
54 * ---------------------------------------------------------------------
55 */
56 ldr r0, =\_exception_vectors
57 stcopr r0, VBAR
58 stcopr r0, MVBAR
59 isb
60
61 /* -----------------------------------------------------
62 * Enable the SIF bit to disable instruction fetches
63 * from Non-secure memory.
64 * -----------------------------------------------------
65 */
66 ldcopr r0, SCR
67 orr r0, r0, #SCR_SIF_BIT
68 stcopr r0, SCR
69
70 /* -----------------------------------------------------
71 * Enable the Asynchronous data abort now that the
72 * exception vectors have been setup.
73 * -----------------------------------------------------
74 */
75 cpsie a
76 isb
77
78 /* Enable access to Advanced SIMD registers */
79 ldcopr r0, NSACR
80 bic r0, r0, #NSASEDIS_BIT
81 bic r0, r0, #NSTRCDIS_BIT
82 orr r0, r0, #(NASCR_CP10_BIT | NASCR_CP11_BIT)
83 stcopr r0, NSACR
84 isb
85
86 /*
87 * Enable access to Advanced SIMD, Floating point and to the Trace
88 * functionality as well.
89 */
90 ldcopr r0, CPACR
91 bic r0, r0, #ASEDIS_BIT
92 bic r0, r0, #TRCDIS_BIT
93 orr r0, r0, #CPACR_ENABLE_FP_ACCESS
94 stcopr r0, CPACR
95 isb
96
97 vmrs r0, FPEXC
98 orr r0, r0, #FPEXC_EN_BIT
99 vmsr FPEXC, r0
100 isb
dp-arm595d0d52017-02-08 11:51:50 +0000101
102 /* Disable secure self-hosted invasive debug. */
103 ldr r0, =SDCR_DEF_VAL
104 stcopr r0, SDCR
105
Yatharth Kocharf528faf2016-06-28 16:58:26 +0100106 .endm

/* -----------------------------------------------------------------------------
 * This is the super set of actions that need to be performed during a cold boot
 * or a warm boot in EL3. This code is shared by BL1 and BL32 (SP_MIN).
 *
 * This macro will always perform reset handling, architectural initialisations
 * and stack setup. The rest of the actions are optional because they might not
 * be needed, depending on the context in which this macro is called. This is
 * why this macro is parameterised; each parameter allows to enable/disable
 * some actions.
 *
 * _set_endian:
 *	Whether the macro needs to configure the endianness of data accesses.
 *
 * _warm_boot_mailbox:
 *	Whether the macro needs to detect the type of boot (cold/warm). The
 *	detection is based on the platform entrypoint address: if it is zero
 *	then it is a cold boot, otherwise it is a warm boot. In the latter case,
 *	this macro jumps on the platform entrypoint address.
 *
 * _secondary_cold_boot:
 *	Whether the macro needs to identify the CPU that is calling it: primary
 *	CPU or secondary CPU. The primary CPU will be allowed to carry on with
 *	the platform initialisations, while the secondaries will be put in a
 *	platform-specific state in the meantime.
 *
 *	If the caller knows this macro will only be called by the primary CPU
 *	then this parameter can be defined to 0 to skip this step.
 *
 * _init_memory:
 *	Whether the macro needs to initialise the memory.
 *
 * _init_c_runtime:
 *	Whether the macro needs to initialise the C runtime environment.
 *
 * _exception_vectors:
 *	Address of the exception vectors to program in the VBAR and MVBAR
 *	registers.
 * -----------------------------------------------------------------------------
 */
146 .macro el3_entrypoint_common \
147 _set_endian, _warm_boot_mailbox, _secondary_cold_boot, \
148 _init_memory, _init_c_runtime, _exception_vectors
149
150 /* Make sure we are in Secure Mode */
151#if ASM_ASSERTION
152 ldcopr r0, SCR
153 tst r0, #SCR_NS_BIT
154 ASM_ASSERT(eq)
155#endif
156
157 .if \_set_endian
158 /* -------------------------------------------------------------
159 * Set the CPU endianness before doing anything that might
160 * involve memory reads or writes.
161 * -------------------------------------------------------------
162 */
163 ldcopr r0, SCTLR
164 bic r0, r0, #SCTLR_EE_BIT
165 stcopr r0, SCTLR
166 isb
167 .endif /* _set_endian */
168
169 /* Switch to monitor mode */
170 cps #MODE32_mon
171 isb
172
173 .if \_warm_boot_mailbox
174 /* -------------------------------------------------------------
175 * This code will be executed for both warm and cold resets.
176 * Now is the time to distinguish between the two.
177 * Query the platform entrypoint address and if it is not zero
178 * then it means it is a warm boot so jump to this address.
179 * -------------------------------------------------------------
180 */
181 bl plat_get_my_entrypoint
182 cmp r0, #0
183 bxne r0
184 .endif /* _warm_boot_mailbox */
185
186 /* ---------------------------------------------------------------------
187 * It is a cold boot.
188 * Perform any processor specific actions upon reset e.g. cache, TLB
189 * invalidations etc.
190 * ---------------------------------------------------------------------
191 */
192 bl reset_handler
193
194 el3_arch_init_common \_exception_vectors
195
196 .if \_secondary_cold_boot
197 /* -------------------------------------------------------------
198 * Check if this is a primary or secondary CPU cold boot.
199 * The primary CPU will set up the platform while the
200 * secondaries are placed in a platform-specific state until the
201 * primary CPU performs the necessary actions to bring them out
202 * of that state and allows entry into the OS.
203 * -------------------------------------------------------------
204 */
205 bl plat_is_my_cpu_primary
206 cmp r0, #0
207 bne do_primary_cold_boot
208
209 /* This is a cold boot on a secondary CPU */
210 bl plat_secondary_cold_boot_setup
211 /* plat_secondary_cold_boot_setup() is not supposed to return */
Jeenu Viswambharan68aef102016-11-30 15:21:11 +0000212 no_ret plat_panic_handler
Yatharth Kocharf528faf2016-06-28 16:58:26 +0100213
214 do_primary_cold_boot:
215 .endif /* _secondary_cold_boot */
216
217 /* ---------------------------------------------------------------------
218 * Initialize memory now. Secondary CPU initialization won't get to this
219 * point.
220 * ---------------------------------------------------------------------
221 */
222
223 .if \_init_memory
224 bl platform_mem_init
225 .endif /* _init_memory */
226
227 /* ---------------------------------------------------------------------
228 * Init C runtime environment:
229 * - Zero-initialise the NOBITS sections. There are 2 of them:
230 * - the .bss section;
231 * - the coherent memory section (if any).
232 * - Relocate the data section from ROM to RAM, if required.
233 * ---------------------------------------------------------------------
234 */
235 .if \_init_c_runtime
Masahiro Yamada441bfdd2016-12-25 23:36:24 +0900236#ifdef IMAGE_BL32
Yatharth Kocharf528faf2016-06-28 16:58:26 +0100237 /* -----------------------------------------------------------------
238 * Invalidate the RW memory used by the BL32 (SP_MIN) image. This
239 * includes the data and NOBITS sections. This is done to
240 * safeguard against possible corruption of this memory by
241 * dirty cache lines in a system cache as a result of use by
242 * an earlier boot loader stage.
243 * -----------------------------------------------------------------
244 */
245 ldr r0, =__RW_START__
246 ldr r1, =__RW_END__
247 sub r1, r1, r0
248 bl inv_dcache_range
249#endif /* IMAGE_BL32 */
250
251 ldr r0, =__BSS_START__
252 ldr r1, =__BSS_SIZE__
253 bl zeromem
254
255#if USE_COHERENT_MEM
256 ldr r0, =__COHERENT_RAM_START__
257 ldr r1, =__COHERENT_RAM_UNALIGNED_SIZE__
258 bl zeromem
259#endif
260
Masahiro Yamada441bfdd2016-12-25 23:36:24 +0900261#ifdef IMAGE_BL1
Yatharth Kocharf528faf2016-06-28 16:58:26 +0100262 /* -----------------------------------------------------
263 * Copy data from ROM to RAM.
264 * -----------------------------------------------------
265 */
266 ldr r0, =__DATA_RAM_START__
267 ldr r1, =__DATA_ROM_START__
268 ldr r2, =__DATA_SIZE__
Yatharth Kocharc44c5af2016-09-28 11:00:05 +0100269 bl memcpy4
Yatharth Kocharf528faf2016-06-28 16:58:26 +0100270#endif
271 .endif /* _init_c_runtime */
272
273 /* ---------------------------------------------------------------------
274 * Allocate a stack whose memory will be marked as Normal-IS-WBWA when
275 * the MMU is enabled. There is no risk of reading stale stack memory
276 * after enabling the MMU as only the primary CPU is running at the
277 * moment.
278 * ---------------------------------------------------------------------
279 */
280 bl plat_set_my_stack
Douglas Raillard306593d2017-02-24 18:14:15 +0000281
282#if STACK_PROTECTOR_ENABLED
283 .if \_init_c_runtime
284 bl update_stack_protector_canary
285 .endif /* _init_c_runtime */
286#endif
Yatharth Kocharf528faf2016-06-28 16:58:26 +0100287 .endm

#endif /* __EL3_COMMON_MACROS_S__ */