/*
 * Copyright (c) 2013-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <context.h>

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
#if CTX_INCLUDE_PAUTH_REGS
	.global	pauth_context_restore
	.global	pauth_context_save
#endif
	.global	save_gp_registers
	.global	restore_gp_registers
	.global	restore_gp_registers_eret
	.global	el3_exit

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL1 system register context. It assumes that
 * 'x0' is pointing to an 'el1_sys_regs' structure where
 * the register context will be saved.
 * -----------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	mrs	x10, pmcr_el0
	str	x10, [x0, #CTX_PMCR_EL0]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL1 system register context. It assumes
 * that 'x0' is pointing to an 'el1_sys_regs' structure
 * from where the register context will be restored.
 * -----------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	ldr	x10, [x0, #CTX_PMCR_EL0]
	msr	pmcr_el0, x10

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* -----------------------------------------------------
 * The following function strictly follows the AAPCS64
 * to use x9-x17 (temporary caller-saved registers
 * according to the AArch64 PCS) to save floating point
 * register context. It assumes that 'x0' is pointing to
 * a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Accesses to VFP registers will trap if CPTR_EL3.TFP
 * is set. However, the Trusted Firmware currently
 * neither uses VFP registers nor sets this trap, so the
 * bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * -----------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AAPCS64
 * to use x9-x17 (temporary caller-saved registers
 * according to the AArch64 PCS) to restore floating
 * point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure from where the
 * register context will be restored.
 *
 * Accesses to VFP registers will trap if CPTR_EL3.TFP
 * is set. However, the Trusted Firmware currently
 * neither uses VFP registers nor sets this trap, so the
 * bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * -----------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it.
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

#if CTX_INCLUDE_PAUTH_REGS
/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save the ARMv8.3-PAuth register context. It assumes
 * that 'sp' is pointing to a 'cpu_context_t' structure
 * to where the register context will be saved.
 * -----------------------------------------------------
 */
func pauth_context_save
	add	x11, sp, #CTX_PAUTH_REGS_OFFSET

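	/* Save the APIA, APIB, APDA, APDB and APGA key pairs */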
	mrs	x9, APIAKeyLo_EL1
	mrs	x10, APIAKeyHi_EL1
	stp	x9, x10, [x11, #CTX_PACIAKEY_LO]

	mrs	x9, APIBKeyLo_EL1
	mrs	x10, APIBKeyHi_EL1
	stp	x9, x10, [x11, #CTX_PACIBKEY_LO]

	mrs	x9, APDAKeyLo_EL1
	mrs	x10, APDAKeyHi_EL1
	stp	x9, x10, [x11, #CTX_PACDAKEY_LO]

	mrs	x9, APDBKeyLo_EL1
	mrs	x10, APDBKeyHi_EL1
	stp	x9, x10, [x11, #CTX_PACDBKEY_LO]

	mrs	x9, APGAKeyLo_EL1
	mrs	x10, APGAKeyHi_EL1
	stp	x9, x10, [x11, #CTX_PACGAKEY_LO]

	ret
endfunc pauth_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore the ARMv8.3-PAuth register context. It assumes
 * that 'sp' is pointing to a 'cpu_context_t' structure
 * from where the register context will be restored.
 * -----------------------------------------------------
 */
func pauth_context_restore
	add	x11, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x9, x10, [x11, #CTX_PACIAKEY_LO]
	msr	APIAKeyLo_EL1, x9
	msr	APIAKeyHi_EL1, x10

	ldp	x9, x10, [x11, #CTX_PACIBKEY_LO]
	msr	APIBKeyLo_EL1, x9
	msr	APIBKeyHi_EL1, x10

	ldp	x9, x10, [x11, #CTX_PACDAKEY_LO]
	msr	APDAKeyLo_EL1, x9
	msr	APDAKeyHi_EL1, x10

	ldp	x9, x10, [x11, #CTX_PACDBKEY_LO]
	msr	APDBKeyLo_EL1, x9
	msr	APDBKeyHi_EL1, x10

	ldp	x9, x10, [x11, #CTX_PACGAKEY_LO]
	msr	APGAKeyLo_EL1, x9
	msr	APGAKeyHi_EL1, x10

	ret
endfunc pauth_context_restore
#endif /* CTX_INCLUDE_PAUTH_REGS */

/* -----------------------------------------------------
 * The following functions are used to save and restore
 * all the general purpose registers. Ideally we would
 * only save and restore the callee-saved registers when
 * a world switch occurs but that type of implementation
 * is more complex. So currently we always save and
 * restore these registers on entry to and exit from EL3.
 * These are not macros to ensure their invocation fits
 * within the 32 instructions per exception vector.
 * clobbers: x18
 * -----------------------------------------------------
 */
func save_gp_registers
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
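	/* x18 was saved above, so use it to save the lower EL's SP_EL0 */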
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	ret
endfunc save_gp_registers

/* -----------------------------------------------------
 * This function restores all general purpose registers
 * except x30 from the CPU context. The x30 register must
 * be explicitly restored by the caller.
 * -----------------------------------------------------
 */
func restore_gp_registers
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
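	/* Use x28 as scratch to restore SP_EL0 before x28 itself is restored */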
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_registers

/* -----------------------------------------------------
 * Restore general purpose registers (including x30) and
 * exit EL3 via ERET to a lower exception level.
 * -----------------------------------------------------
 */
func restore_gp_registers_eret
	bl	restore_gp_registers
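	/* x30 is not restored by restore_gp_registers, so restore it here */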
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/*
	 * Issue Error Synchronization Barrier to synchronize SErrors before
	 * exiting EL3. We're running with EAs unmasked, so any synchronized
	 * errors would be taken immediately; therefore no need to inspect
	 * DISR_EL1 register.
	 */
	esb
#endif
	eret
endfunc restore_gp_registers_eret

/* -----------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a
 * valid context structure from where the GP regs and
 * other special registers can be retrieved.
 * -----------------------------------------------------
 */
func el3_exit
	/* -----------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack,
	 * which will be used for handling the next SMC. Then
	 * switch to SP_EL3.
	 * -----------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #1
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* -----------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * -----------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* Restore mitigation state as it was on entry to EL3 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
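	/* If a mitigation disable routine was registered, call it */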
	cmp	x17, xzr
	beq	1f
	blr	x17
1:
#endif

#if CTX_INCLUDE_PAUTH_REGS
	/* Restore ARMv8.3-PAuth registers */
	bl	pauth_context_restore
#endif

	/* Restore saved general purpose registers and return */
	b	restore_gp_registers_eret
endfunc el3_exit