/*
 * Copyright (c) 2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */


#include <asm_macros.S>
#include <context.h>
#include <ea_handle.h>


	.globl	handle_lower_el_ea_esb
	.globl	enter_lower_el_sync_ea
	.globl	enter_lower_el_async_ea


/*
 * Function to delegate External Aborts synchronized by the ESB instruction at
 * EL3 vector entry. This function assumes GP registers x0-x29 have been saved,
 * and are available for use. It delegates the handling of the EA to the
 * platform handler, and returns only upon successfully handling the EA;
 * otherwise it panics. On return from this function, the original exception
 * handler is expected to resume execution.
 */
func handle_lower_el_ea_esb
	mov	x0, #ERROR_EA_ESB
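	/*
	 * The ESB instruction at vector entry will have deferred any pending
	 * SError; DISR_EL1 holds the syndrome of the deferred error.
	 */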
	mrs	x1, DISR_EL1
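	/*
	 * Tail call: ea_proceed preserves x30, so it returns directly to the
	 * caller of this function, allowing the original handler to resume.
	 */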
	b	ea_proceed
endfunc handle_lower_el_ea_esb


/*
 * This function forms the tail end of Synchronous Exception entry from a lower
 * EL, and expects to handle only Synchronous External Aborts from lower ELs.
 * If any other kind of exception is detected, this function reports it as an
 * unhandled exception.
 *
 * Since it is part of the exception vector, this function doesn't expect any
 * GP registers to have been saved. It delegates the handling of the EA to the
 * platform handler, and upon successfully handling the EA, exits EL3;
 * otherwise it panics.
 */
func enter_lower_el_sync_ea
	/*
	 * Explicitly save x30 so as to free up a register and to enable
	 * branching.
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

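	/* Extract the Exception Class field from ESR_EL3 */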
	mrs	x30, esr_el3
	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH

	/* Check for I/D aborts from lower EL */
	cmp	x30, #EC_IABORT_LOWER_EL
	b.eq	1f

	cmp	x30, #EC_DABORT_LOWER_EL
	b.ne	2f

1:
	/* Test for EA bit in the instruction syndrome */
	mrs	x30, esr_el3
	tbz	x30, #ESR_ISS_EABORT_EA_BIT, 2f

	/* Save GP registers */
	bl	save_gp_registers

	/* Set up exception class and syndrome arguments for the platform handler */
	mov	x0, #ERROR_EA_SYNC
	mrs	x1, esr_el3
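	/*
	 * Make ea_proceed return to el3_exit, so that EL3 is exited once the
	 * EA has been handled.
	 */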
	adr	x30, el3_exit
	b	ea_proceed

2:
	/*
	 * Synchronous exceptions other than External Aborts are not expected
	 * on this path; restore x30 and report an unhandled exception.
	 */
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
	no_ret	report_unhandled_exception


/*
 * This function handles SErrors from lower ELs.
 *
 * Since it is part of the exception vector, this function doesn't expect any
 * GP registers to have been saved. It delegates the handling of the EA to the
 * platform handler, and upon successfully handling the EA, exits EL3;
 * otherwise it panics.
 */
func enter_lower_el_async_ea
	/*
	 * Explicitly save x30 so as to free up a register and to enable
	 * branching.
	 */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

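	/*
	 * Unlike the synchronous path, no syndrome check is performed here:
	 * all SErrors from lower ELs are delegated as asynchronous EAs.
	 */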
	/* Save GP registers */
	bl	save_gp_registers

	/* Set up exception class and syndrome arguments for the platform handler */
	mov	x0, #ERROR_EA_ASYNC
	mrs	x1, esr_el3
	adr	x30, el3_exit
	b	ea_proceed
endfunc enter_lower_el_async_ea


/*
 * Delegate External Abort handling to the platform's EA handler. This function
 * assumes that all GP registers have been saved by the caller.
 *
 * x0: EA reason
 * x1: EA syndrome
 */
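/*
 * Note: the platform handler invoked below is expected to have a C prototype
 * along these lines (inferred from the register arguments set up in this
 * function; the exact declaration should be taken from the platform interface
 * headers):
 *
 *	void plat_ea_handler(unsigned int ea_reason, uint64_t syndrome,
 *			void *cookie, void *handle, uint64_t flags);
 */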
func ea_proceed
	/* Save EL3 state */
	mrs	x2, spsr_el3
	mrs	x3, elr_el3
	stp	x2, x3, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]

	/*
	 * Save ESR as handling might involve lower ELs, and returning to EL3
	 * from there would trample the original ESR.
	 */
	mrs	x4, scr_el3
	mrs	x5, esr_el3
	stp	x4, x5, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]

	/*
	 * Set up the rest of the arguments, and call the platform External
	 * Abort handler.
	 *
	 * x0: EA reason (already in place)
	 * x1: Exception syndrome (already in place)
	 * x2: Cookie (unused for now)
	 * x3: Context pointer
	 * x4: Flags (security state from SCR for now)
	 */
	mov	x2, xzr
	mov	x3, sp
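	/*
	 * Bit 0 of SCR_EL3 is the NS bit; extract it to form the security
	 * state flag.
	 */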
	ubfx	x4, x4, #0, #1

	/* Switch to the runtime stack (SP_EL0) */
	ldr	x5, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]
	msr	spsel, #0
	mov	sp, x5

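	/*
	 * x29 is callee-saved, so use it to preserve the return address
	 * across the call to the platform handler.
	 */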
	mov	x29, x30
	bl	plat_ea_handler
	mov	x30, x29

	/* Make SP point to context */
	msr	spsel, #1

	/* Restore EL3 state */
	ldp	x1, x2, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	spsr_el3, x1
	msr	elr_el3, x2

	/* Restore ESR_EL3 and SCR_EL3 */
	ldp	x3, x4, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	msr	scr_el3, x3
	msr	esr_el3, x4

	ret
endfunc ea_proceed