/*
 * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
 * Copyright (c) 2022, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <asm_macros.S>
#include <assert_macros.S>
#include <bl31/ea_handle.h>
#include <context.h>
#include <cpu_macros.S>
#include <lib/extensions/ras_arch.h>

	.globl	handle_lower_el_sync_ea
	.globl	handle_lower_el_async_ea
	.globl	handle_pending_async_ea
	.globl	reflect_pending_async_ea_to_lower_el

/*
 * This function forms the tail end of Synchronous Exception entry from a lower
 * EL. It expects to handle Synchronous External Aborts from a lower EL and CPU
 * Implementation Defined Exceptions; if any other kind of exception is
 * detected, it reports an unhandled exception.
 *
 * It delegates the handling of the EA to the platform handler and, upon
 * successfully handling the EA, exits EL3; otherwise it panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_sync_ea
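	/*
	 * Extract the Exception Class: per the Arm ARM, EC is ESR_EL3[31:26],
	 * i.e. a 6-bit field starting at bit 26, which is what the ubfx below
	 * pulls out.
	 */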
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.eq	1f

	/* Save GP registers */
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

	/* Get the cpu_ops pointer */
	bl	get_cpu_ops_ptr

	/* Get the cpu_ops exception handler */
	ldr	x0, [x0, #CPU_E_HANDLER_FUNC]

	/*
	 * If the reserved function pointer is NULL, this CPU does not have an
	 * implementation defined exception handler function.
	 */
	cbz	x0, 2f
	mrs	x1, esr_el3
	ubfx	x1, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	blr	x0
	b	2f

1:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Set up exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	bl	delegate_sync_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]

	/* Synchronous exceptions other than the above are assumed to be EA */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	no_ret	report_unhandled_exception
endfunc handle_lower_el_sync_ea

/*
 * This function handles SErrors from lower ELs.
 *
 * It delegates the handling of the EA to the platform handler and, upon
 * successfully handling the EA, exits EL3; otherwise it panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_async_ea

	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Set up exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_ASYNC
	mrs	x1, esr_el3
	bl	delegate_async_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc handle_lower_el_async_ea

/*
 * NOTE 1: Synchronized async EA handling
 *
 * The comment here applies to the following two functions:
 * - handle_pending_async_ea
 * - reflect_pending_async_ea_to_lower_el
 *
 * Both must be called from the exception vector directly.
 *
 * This special handling is required to cater for async EAs from a lower EL
 * which are synchronized at EL3 entry.
 *
 * This scenario may arise when there is an error (EA) in the system which is
 * not yet signaled to the PE while executing in a lower EL. During entry into
 * EL3, the errors are synchronized either implicitly or explicitly, causing
 * an async EA to pend at EL3.
 *
 * On detecting a pending EA (via ISR_EL1.A), and based on the routing model
 * of the EA, either handle it in EL3 using "handle_pending_async_ea" (FFH) or
 * return to the lower EL using "reflect_pending_async_ea_to_lower_el" (KFH).
 */
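
/*
 * For illustration only, the dispatch described in NOTE 1 amounts to the
 * following C-like sketch (the actual decision is made in the vector code,
 * not here; the names are descriptive, not real symbols):
 *
 *	if (read_isr_el1() & ISR_A_BIT) {
 *		if (ffh_enabled_and_outgoing_world_is_ns())
 *			handle_pending_async_ea();		// FFH
 *		else
 *			reflect_pending_async_ea_to_lower_el();	// KFH
 *	}
 */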

/*
 * Refer to NOTE 1: Firmware First Handling (FFH)
 * Called when FFH is enabled and the outgoing world is Non-secure
 * (scr_el3.ea = 1).
 *
 * This function assumes x30 has been saved.
 */
#if HANDLE_EA_EL3_FIRST_NS
func handle_pending_async_ea
	/*
	 * Prepare for nested handling of EA. Stash sysregs clobbered by the
	 * nested exception and handler.
	 */
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	mrs	x30, esr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	mrs	x30, spsr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	mrs	x30, elr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]

	mov	x30, #1
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]

	/*
	 * Restore the original x30 saved as part of entering EL3. This is not
	 * required for the current function, but for the EL3 SError vector
	 * entry once the PSTATE.A bit is unmasked. We restore x30 here and the
	 * same value is then stored by the EL3 SError vector entry.
	 */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/*
	 * After clearing the PSTATE.A bit, the pending SError will trigger at
	 * the current EL. Insert an explicit synchronization event to ensure
	 * the newly unmasked interrupt is taken immediately.
	 */
	unmask_async_ea
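	/*
	 * Note: unmask_async_ea is assumed to expand to clearing the PSTATE.A
	 * mask (e.g. "msr daifclr, #DAIF_ABT_BIT") followed by an "isb" as the
	 * synchronization event described above; see the macro definition for
	 * the exact sequence.
	 */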

	/* Restore the original exception information along with zeroing the storage */
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	msr	elr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	msr	spsr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	msr	esr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]

	/*
	 * If the original exception corresponds to an SError from a lower EL,
	 * eret back to the lower EL; otherwise return to the vector table for
	 * the original exception handling.
	 */
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x30, #EC_SERROR
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	b.eq	1f
	ret
1:
	exception_return
endfunc handle_pending_async_ea
#endif /* HANDLE_EA_EL3_FIRST_NS */

/*
 * Refer to NOTE 1: Kernel First Handling (KFH)
 * Called in the following scenarios:
 * - Always, if the outgoing world is either Secure or Realm
 * - In KFH mode, if the outgoing world is Non-secure
 *
 * This function assumes x30 has been saved.
 */

func reflect_pending_async_ea_to_lower_el
	/*
	 * As the original exception was not handled, we need to ensure that we
	 * return back to the instruction which caused the exception. To
	 * achieve that, eret to "elr-4" (label "subtract_elr_el3") for an SMC,
	 * or simply eret otherwise (label "skip_smc_check").
	 *
	 * LIMITATION: It could be that the async EA is masked at the target
	 * exception level, or that the priority of the async EA with respect
	 * to the EL3/secure interrupt is lower, which causes back and forth
	 * between the lower EL and EL3. In that case, we can track the loop
	 * count in "CTX_NESTED_EA_FLAG" and leverage the previous ELR in
	 * "CTX_SAVED_ELR_EL3" to detect this cycle and panic to indicate a
	 * problem here (label "check_loop_ctr"). However, setting
	 * SCR_EL3.IESB = 1 should give priority to SError handling, as per the
	 * AArch64.TakeException pseudocode in the Arm ARM.
	 *
	 * TODO: In future, if EL3 gains the capability to inject a virtual
	 * SError to lower ELs, we can remove the el3_panic, handle the
	 * original exception first and inject an SError to the lower EL before
	 * ereting back.
	 */
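	/*
	 * Illustrative sketch only (not real code): the replay-loop detection
	 * below behaves roughly like
	 *
	 *	if (saved_elr == elr_el3) {
	 *		if (++nested_ea_flag >= ASYNC_EA_REPLAY_COUNTER)
	 *			el3_panic();
	 *	} else {
	 *		saved_elr = elr_el3;	// new fault address
	 *		nested_ea_flag = 0;	// reset the loop counter
	 *	}
	 */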
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ldr	x29, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	mrs	x28, elr_el3
	cmp	x29, x28
	b.eq	check_loop_ctr
	str	x28, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	/* Zero the loop counter */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
	b	skip_loop_ctr
check_loop_ctr:
	ldr	x29, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
	add	x29, x29, #1
	str	x29, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
	cmp	x29, #ASYNC_EA_REPLAY_COUNTER
	b.ge	el3_panic
skip_loop_ctr:
	/*
	 * Logic to distinguish whether we came from an SMC or any other
	 * exception. Use offsets into the vector table to work out which
	 * exception we are handling. Each group of vectors spans 0x200 bytes:
	 * offsets "0x0-0x80" hold the synchronous entry and "0x80-0x200" the
	 * asynchronous entries. Use the vector base address (vbar_el3) and the
	 * exception offset (LR) to calculate whether the address we came from
	 * is any of the following:
	 * "0x0-0x80", "0x200-0x280", "0x400-0x480" or "0x600-0x680"
	 */
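	/*
	 * Worked example (for illustration): an SError from a lower EL using
	 * AArch64 enters at vbar_el3 + 0x580; 0x580 AND 0x1ff = 0x180, which
	 * is >= 0x80, so it is asynchronous. An SMC enters through the
	 * synchronous entry at vbar_el3 + 0x400; 0x400 AND 0x1ff = 0x0, which
	 * is < 0x80, so the SMC check below is performed.
	 */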
	mrs	x29, vbar_el3
	sub	x30, x30, x29
	and	x30, x30, #0x1ff
	cmp	x30, #0x80
	b.ge	skip_smc_check
	/* It's a synchronous exception. Now check whether it is an SMC. */
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x30, #EC_AARCH32_SMC
	b.eq	subtract_elr_el3
	cmp	x30, #EC_AARCH64_SMC
	b.eq	subtract_elr_el3
	b	skip_smc_check
subtract_elr_el3:
	sub	x28, x28, #4
skip_smc_check:
	msr	elr_el3, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	exception_return
endfunc reflect_pending_async_ea_to_lower_el

/*
 * Prelude for Synchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_sync_ea
#if ENABLE_FEAT_RAS
	/*
	 * Check for the Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
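	/*
	 * Field positions, per the Arm ARM encoding for abort syndromes: SET
	 * (Synchronous Error Type) is ISS[12:11] and the fault status code
	 * is ESR[5:0], where SYNC_EA_FSC (0b010000) denotes a synchronous
	 * external abort. The checks below rely on those encodings.
	 */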
	ubfx	x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH
	cmp	x2, #ERROR_STATUS_SET_UC
	b.ne	1f

	/* Check fault status code */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #SYNC_EA_FSC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_sync_ea

/*
 * Prelude for Asynchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_async_ea
#if ENABLE_FEAT_RAS
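	/*
	 * SError syndrome layout assumed by the checks below (per the Arm
	 * ARM): IDS (Implementation Defined Syndrome) is ESR bit 24, AET
	 * (Asynchronous Error Type) is ESR[12:10], and DFSC is ESR[5:0],
	 * where DFSC == 0b010001 (0x11) identifies an SError.
	 */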
	/*
	 * Check the Exception Class to ensure this is an SError, as this
	 * function should only be invoked for SErrors. If that is not the
	 * case, which implies either a HW error or a programming error, panic.
	 */
	ubfx	x2, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x2, #EC_SERROR
	b.ne	el3_panic
	/*
	 * Check for Implementation Defined Syndrome. If so, skip checking
	 * the Uncontainable error type from the syndrome, as the format is
	 * unknown.
	 */
	tbnz	x1, #SERROR_IDS_BIT, 1f

	/* AET is only valid when DFSC is 0x11 */
	ubfx	x2, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x2, #DFSC_SERROR
	b.ne	1f

	/*
	 * Check for the Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x3, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH
	cmp	x3, #ERROR_STATUS_UET_UC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_async_ea

/*
 * Delegate External Abort handling to the platform's EA handler. This
 * function assumes that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func ea_proceed
	/*
	 * If the ESR loaded earlier is not zero, we were processing an EA
	 * already, and this is a double fault.
	 */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]
	cbz	x5, 1f
	no_ret	plat_handle_double_fault

1:
	/* Save EL3 state */
	mrs	x2, spsr_el3
	mrs	x3, elr_el3
	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]

	/*
	 * Save ESR as handling might involve lower ELs, and returning back to
	 * EL3 from there would trample the original ESR.
	 */
	mrs	x4, scr_el3
	mrs	x5, esr_el3
	stp	x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/*
	 * Set up the rest of the arguments, and call the platform External
	 * Abort handler.
	 *
	 * x0: EA reason (already in place)
	 * x1: Exception syndrome (already in place)
	 * x2: Cookie (unused for now)
	 * x3: Context pointer
	 * x4: Flags (security state from SCR for now)
	 */
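	/*
	 * For reference, the platform handler called below is expected to
	 * match a C prototype along the lines of:
	 *
	 *	void plat_ea_handler(unsigned int ea_reason, uint64_t syndrome,
	 *			     void *cookie, void *handle, uint64_t flags);
	 *
	 * (see the platform porting documentation for the authoritative
	 * signature).
	 */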
	mov	x2, xzr
	mov	x3, sp
	ubfx	x4, x4, #0, #1

	/* Switch to runtime stack */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #MODE_SP_EL0
	mov	sp, x5

	mov	x29, x30
#if ENABLE_ASSERTIONS
	/* Stash the stack pointer */
	mov	x28, sp
#endif
	bl	plat_ea_handler

#if ENABLE_ASSERTIONS
	/*
	 * Error handling flows might involve long jumps; so upon returning
	 * from the platform error handler, validate that we've completely
	 * unwound the stack.
	 */
	mov	x27, sp
	cmp	x28, x27
	ASM_ASSERT(eq)
#endif

	/* Make SP point to context */
	msr	spsel, #MODE_SP_ELX

	/* Restore EL3 state and ESR */
	ldp	x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	spsr_el3, x1
	msr	elr_el3, x2

	/* Restore ESR_EL3 and SCR_EL3 */
	ldp	x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	msr	scr_el3, x3
	msr	esr_el3, x4

#if ENABLE_ASSERTIONS
	cmp	x4, xzr
	ASM_ASSERT(ne)
#endif

	/* Clear ESR storage */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]

	ret	x29
endfunc ea_proceed