/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS, using x9-x16 (temporary caller-saved registers)
 * to save/restore the EL2 system register context.
 * The el2_sysregs_context_save/restore_common functions
 * save and restore registers that are common to all
 * configurations. The rest of the functions save and
 * restore EL2 system registers that are present only
 * when a particular feature is enabled. All functions
 * assume that 'x0' points to an 'el2_sys_regs'
 * structure where the register context will be
 * saved/restored.
 *
 * The following registers are not saved/restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
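/* -----------------------------------------------------
 * Illustrative calling sequence (a sketch only; the
 * real call sites live in the EL2 context management
 * code, not in this file). Callers are expected to
 * invoke the common handler first, then any
 * feature-specific handlers, with 'x0' pre-loaded:
 *
 *	mov	x0, <address of el2_sys_regs>
 *	bl	el2_sysregs_context_save_common
 * #if CTX_INCLUDE_MTE_REGS
 *	bl	el2_sysregs_context_save_mte
 * #endif
 * -----------------------------------------------------
 */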
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* !ERRATA_SPECULATIVE_AT */
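	/*
	 * When ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1
	 * are instead saved by save_and_update_ptw_el1_sys_regs
	 * (see below).
	 */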

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build is configured to do so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from which the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* !ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build is configured to do so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from which the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as the ERET to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
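	/*
	 * ENABLE_FEAT_DIT == 2 selects runtime detection: read the
	 * ID_AA64PFR0_EL1.DIT field and skip setting PSTATE.DIT if
	 * the feature is not implemented.
	 */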
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif /* ENABLE_FEAT_DIT == 2 */
	mov	x8, #DIT_BIT
	msr	DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also ensures that the Secure Cycle Counter (PMCCNTR_EL0)
 * is disabled in EL3/Secure (ARMv8.5-PMU), in which case
 * PMCCNTR_EL0 need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	/* Disable cycle counter when event counting is prohibited */
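	/*
	 * Setting PMCR_EL0.DP stops PMCCNTR_EL0 from counting while
	 * event counting is prohibited (e.g. in Secure state), so it
	 * does not leak EL3/Secure execution timing.
	 */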
	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets any of the PSTATE bits that are not set by hardware,
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual, to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
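/* A note on usage (an assumption about callers, not enforced here):
 * this must run on the EL3 entry path before x0-x29 are clobbered,
 * since save_gp_pmcr_pauth_regs captures the live register file.
 */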
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
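	/*
	 * x28 is used as scratch to restore SP_EL0 first; it is then
	 * reloaded below with its own saved value.
	 */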
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for the lower ELs (EL1 and EL0). The first
	 * step disables stage 1 translation table walks; the second
	 * step forces the page table walker to use the TCR_EL1.EPDx
	 * bits for address translation. The ISB ensures that the CPU
	 * performs these two steps in order.
	 *
	 * 1. Update the TCR_EL1.EPDx bits to disable stage 1 page
	 *    table walks.
	 * 2. Enable the MMU bit to avoid identity mapping via stage 2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
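
/*
 * The matching restore is performed by the restore_ptw_el1_sys_regs
 * macro invoked from el3_exit below.
 */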

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from which the GP regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
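	/* ----------------------------------------------------------
	 * The raw S3_6_C1_C2_0 encoding below refers to zcr_el3;
	 * the encoded form avoids relying on assembler support for
	 * the SVE register name.
	 * ----------------------------------------------------------
	 */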
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

/*
 * This is a hot path, so we do not want to perform actual FEAT_RAS
 * runtime detection here. The "esb" is the cheaper variant, so using
 * "dsb" in the ENABLE_FEAT_RAS == 2 case is not ideal, but does no
 * harm.
 */
#if IMAGE_BL31 && ENABLE_FEAT_RAS == 1
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect the DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && ENABLE_FEAT_RAS == 1 */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
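	/* Clear the "is in EL3" flag in the context just before leaving EL3 */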
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit