/*
 * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
 * Copyright (c) 2022, NVIDIA Corporation. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <assert_macros.S>
#include <asm_macros.S>
#include <bl31/ea_handle.h>
#include <context.h>
#include <lib/extensions/ras_arch.h>
#include <cpu_macros.S>

	.globl	handle_lower_el_sync_ea
	.globl	handle_lower_el_async_ea
	.globl	handle_pending_async_ea
/*
 * This function handles Synchronous External Aborts from lower EL.
 *
 * It delegates the handling of the EA to the platform handler, and upon
 * successfully handling the EA, exits EL3; otherwise it panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_sync_ea
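	/*
	 * x30 was saved by the caller, so use it as scratch to extract the EC
	 * field of ESR_EL3 and identify the aborting exception.
	 */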
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.eq	1f

	/* EAs other than the above are unhandled exceptions */
	no_ret	report_unhandled_exception
1:
	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
	bl	delegate_sync_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc handle_lower_el_sync_ea

/*
 * This function handles SErrors from lower ELs.
 *
 * It delegates the handling of the EA to the platform handler, and upon
 * successfully handling the EA, exits EL3; otherwise it panics.
 *
 * This function assumes x30 has been saved.
 */
func handle_lower_el_async_ea

	/*
	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
	 * Also save PMCR_EL0 and set the PSTATE to a known state.
	 */
	bl	prepare_el3_entry

#if ENABLE_PAUTH
	/* Load and program APIAKey firmware key */
	bl	pauth_load_bl31_apiakey
#endif

	/* Setup exception class and syndrome arguments for platform handler */
	mov	x0, #ERROR_EA_ASYNC
	mrs	x1, esr_el3
	bl	delegate_async_ea

	/* el3_exit assumes SP_EL0 on entry */
	msr	spsel, #MODE_SP_EL0
	b	el3_exit
endfunc handle_lower_el_async_ea

/*
 * Handler for an async EA from a lower EL synchronized at EL3 entry in FFH mode.
 *
 * This scenario may arise when there is an error (EA) in the system which is
 * not yet signaled to the PE while executing in a lower EL. During entry into
 * EL3, the errors are synchronized either implicitly or explicitly, causing an
 * async EA to pend at EL3.
 *
 * On detecting the pending EA (via ISR_EL1.A), and if the EA routing model is
 * Firmware First Handling (FFH, SCR_EL3.EA = 1), this handler first handles
 * the pending EA and then handles the original exception.
 *
 * This function assumes x30 has been saved.
 */
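/*
 * In outline, the sequence below is:
 *   1. Stash ESR/SPSR/ELR_EL3 and x30, and set the nested-EA flag.
 *   2. Unmask PSTATE.A so the pending SError is taken and handled at EL3.
 *   3. Restore the original exception context and resume its handling.
 */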
func handle_pending_async_ea
	/*
	 * Prepare for nested handling of the EA. Stash the sysregs that will
	 * be clobbered by the nested exception and its handler.
	 */
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	mrs	x30, esr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	mrs	x30, spsr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	mrs	x30, elr_el3
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]

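	/*
	 * Flag in the context that a synchronized EA is being handled, so
	 * that the EL3 SError vector entry can recognise the nested case.
	 */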
	mov	x30, #1
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
	/*
	 * Restore the original x30 saved as part of entering EL3. This is not
	 * required by the current function itself, but by the EL3 SError
	 * vector entry: once the PSTATE.A bit is unmasked, the pending SError
	 * is taken at EL3 and its vector entry saves x30 again, so x30 must
	 * hold its original value here.
	 */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

	/*
	 * After clearing the PSTATE.A bit, the pending SError will be taken at
	 * the current EL. Insert an explicit synchronization event to ensure
	 * the newly unmasked interrupt is taken immediately.
	 */
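	/*
	 * unmask_async_ea is expected to expand to roughly the following
	 * (see the macro definition for the authoritative sequence):
	 *
	 *	msr	daifclr, #DAIF_ABT_BIT
	 *	isb
	 */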
	unmask_async_ea
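	/*
	 * By the time execution reaches here, the pending SError has been
	 * taken and handled via the EL3 SError vector entry.
	 */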

	/* Restore the original exception information and zero the stash slots */
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	msr	elr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ELR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	msr	spsr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_SPSR_EL3]
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]
	msr	esr_el3, x30
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_ESR_EL3]

	/*
	 * If the original exception corresponds to an SError from a lower EL,
	 * eret back to the lower EL; otherwise return to the vector table for
	 * the original exception's handling.
	 */
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x30, #EC_SERROR
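	/*
	 * Restore the stashed x30 and clear the slot; the condition flags set
	 * by the comparison above survive the loads and stores.
	 */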
	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_SAVED_GPREG_LR]
	b.eq	1f
	ret
1:
	exception_return
endfunc handle_pending_async_ea

/*
 * Prelude for Synchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_sync_ea
#if ENABLE_FEAT_RAS
	/*
	 * Check for Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x2, x1, #EABORT_SET_SHIFT, #EABORT_SET_WIDTH
	cmp	x2, #ERROR_STATUS_SET_UC
	b.ne	1f

	/* Check the fault status code: DFSC 0x10 is a synchronous external abort */
	ubfx	x3, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x3, #SYNC_EA_FSC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_sync_ea

/*
 * Prelude for Asynchronous External Abort handling. This function assumes that
 * all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func delegate_async_ea
#if ENABLE_FEAT_RAS
	/*
	 * Check the Exception Class to ensure this is an SError, as this
	 * function should only be invoked for SErrors. If that is not the
	 * case, which implies either a hardware or a programming error, panic.
	 */
	ubfx	x2, x1, #ESR_EC_SHIFT, #ESR_EC_LENGTH
	cmp	x2, #EC_SERROR
	b.ne	el3_panic
	/*
	 * Check for Implementation Defined Syndrome. If so, skip checking
	 * Uncontainable error type from the syndrome as the format is unknown.
	 */
	tbnz	x1, #SERROR_IDS_BIT, 1f

	/* The AET field is only valid when DFSC is 0x11 (asynchronous SError) */
	ubfx	x2, x1, #EABORT_DFSC_SHIFT, #EABORT_DFSC_WIDTH
	cmp	x2, #DFSC_SERROR
	b.ne	1f

	/*
	 * Check for Uncontainable error type. If so, route to the platform
	 * fatal error handler rather than the generic EA one.
	 */
	ubfx	x3, x1, #EABORT_AET_SHIFT, #EABORT_AET_WIDTH
	cmp	x3, #ERROR_STATUS_UET_UC
	b.ne	1f

	no_ret	plat_handle_uncontainable_ea
1:
#endif

	b	ea_proceed
endfunc delegate_async_ea

/*
 * Delegate External Abort handling to the platform's EA handler. This function
 * assumes that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
func ea_proceed
	/*
	 * If the ESR stored in the context earlier is not zero, we were
	 * already processing an EA, and this is a double fault.
	 */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]
	cbz	x5, 1f
	no_ret	plat_handle_double_fault

1:
	/* Save EL3 state */
	mrs	x2, spsr_el3
	mrs	x3, elr_el3
	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]

	/*
	 * Save ESR as handling might involve lower ELs, and returning to EL3
	 * from there would trample the original ESR.
	 */
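	/*
	 * CTX_SCR_EL3 and CTX_ESR_EL3 are adjacent in the context, so both
	 * registers can be stashed with a single stp.
	 */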
	mrs	x4, scr_el3
	mrs	x5, esr_el3
	stp	x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/*
	 * Setup rest of arguments, and call platform External Abort handler.
	 *
	 * x0: EA reason (already in place).
	 * x1: Exception syndrome (already in place).
	 * x2: Cookie (unused for now).
	 * x3: Context pointer.
	 * x4: Flags (security state from SCR for now).
	 */
	mov	x2, xzr
	mov	x3, sp
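	/* x4 currently holds SCR_EL3; keep only the NS bit (bit 0) as flags */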
	ubfx	x4, x4, #0, #1

	/* Switch to runtime stack */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #MODE_SP_EL0
	mov	sp, x5

	mov	x29, x30
#if ENABLE_ASSERTIONS
	/* Stash the stack pointer */
	mov	x28, sp
#endif
	bl	plat_ea_handler

#if ENABLE_ASSERTIONS
	/*
	 * Error handling flows might involve long jumps; so upon returning
	 * from the platform error handler, validate that we've completely
	 * unwound the stack.
	 */
	mov	x27, sp
	cmp	x28, x27
	ASM_ASSERT(eq)
#endif

	/* Make SP point to context */
	msr	spsel, #MODE_SP_ELX

	/* Restore EL3 state */
	ldp	x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	spsr_el3, x1
	msr	elr_el3, x2

	/* Restore ESR_EL3 and SCR_EL3 */
	ldp	x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	msr	scr_el3, x3
	msr	esr_el3, x4

#if ENABLE_ASSERTIONS
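	/*
	 * The ESR being handled must be non-zero, since the double-fault
	 * detection at the top of this function relies on it.
	 */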
	cmp	x4, xzr
	ASM_ASSERT(ne)
#endif

	/* Clear ESR storage */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_ESR_EL3]

	ret	x29
endfunc ea_proceed