blob: 0f2dfeb7787adedd95a990c659eb04df0e8bba56 [file] [log] [blame]
/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
6
Dan Handley2bd4ef22014-04-09 13:14:54 +01007#include <arch.h>
Andrew Thoelke38bde412014-03-18 13:46:55 +00008#include <asm_macros.S>
Jan Dabrosfa015982019-12-02 13:30:03 +01009#include <assert_macros.S>
Dan Handley2bd4ef22014-04-09 13:14:54 +010010#include <context.h>
Manish V Badarkhee07e8082020-07-23 12:43:25 +010011#include <el3_common_macros.S>
Achin Gupta9ac63c52014-01-16 12:08:03 +000012
Max Shvetsovbdf502d2020-02-25 13:56:19 +000013#if CTX_INCLUDE_EL2_REGS
Zelalem Aweke5362beb2022-04-04 17:42:48 -050014 .global el2_sysregs_context_save_common
15 .global el2_sysregs_context_restore_common
Zelalem Aweke5362beb2022-04-04 17:42:48 -050016#if CTX_INCLUDE_MTE_REGS
17 .global el2_sysregs_context_save_mte
18 .global el2_sysregs_context_restore_mte
19#endif /* CTX_INCLUDE_MTE_REGS */
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +000020#endif /* CTX_INCLUDE_EL2_REGS */
Max Shvetsovbdf502d2020-02-25 13:56:19 +000021
Yatharth Kochar6c0566c2015-10-02 17:56:48 +010022 .global el1_sysregs_context_save
23 .global el1_sysregs_context_restore
24#if CTX_INCLUDE_FPREGS
25 .global fpregs_context_save
26 .global fpregs_context_restore
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +000027#endif /* CTX_INCLUDE_FPREGS */
Daniel Boulby95fb1aa2022-01-19 11:20:05 +000028 .global prepare_el3_entry
Alexei Fedorovf41355c2019-09-13 14:11:59 +010029 .global restore_gp_pmcr_pauth_regs
Manish V Badarkhee07e8082020-07-23 12:43:25 +010030 .global save_and_update_ptw_el1_sys_regs
Yatharth Kochar6c0566c2015-10-02 17:56:48 +010031 .global el3_exit
32
Max Shvetsovbdf502d2020-02-25 13:56:19 +000033#if CTX_INCLUDE_EL2_REGS
34
/* -----------------------------------------------------
 * The functions in this section strictly follow the
 * AArch64 PCS, using only x9-x16 (temporary
 * caller-saved registers) as scratch while saving or
 * restoring EL2 system register context.
 * el2_sysregs_context_save/restore_common handle the
 * registers that exist in every configuration; the
 * remaining functions cover EL2 registers that only
 * exist when a particular feature is enabled. All of
 * them expect 'x0' to point to the 'el2_sys_regs'
 * structure used for the save/restore.
 *
 * The following registers are deliberately not handled
 * here:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_save_common
	/* Registers are read in pairs and stored with stp at
	 * consecutive context offsets wherever possible. */
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	/* Only present when lower ELs can run in AArch32 */
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000124
func el2_sysregs_context_restore_common
	/* Mirror image of el2_sysregs_context_save_common:
	 * each pair is loaded with ldp from the context at
	 * 'x0' and written back with msr in the same order. */
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	/* Only present when lower ELs can run in AArch32 */
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common
194
#if CTX_INCLUDE_MTE_REGS
/* TFSR_EL2 only exists when FEAT_MTE is present */
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000208
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000209#endif /* CTX_INCLUDE_EL2_REGS */
210
/* ------------------------------------------------------------------
 * Save the EL1 system register context. Strictly follows the AArch64
 * PCS, using only x9-x17 (temporary caller-saved registers) as
 * scratch. 'x0' must point to the 'el1_sys_regs' structure that
 * receives the register context.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	/* With the speculative-AT workaround enabled these two
	 * registers are managed elsewhere */
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* AArch32 system registers, when the build includes them */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* NS timer registers, when the build switches them */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* MTE system registers, when the build includes them */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save
Achin Gupta9ac63c52014-01-16 12:08:03 +0000308
/* ------------------------------------------------------------------
 * Restore the EL1 system register context. Strictly follows the
 * AArch64 PCS, using only x9-x17 (temporary caller-saved registers)
 * as scratch. 'x0' must point to the 'el1_sys_regs' structure from
 * which the register context is restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	/* With the speculative-AT workaround enabled these two
	 * registers are managed elsewhere */
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* AArch32 system registers, when the build includes them */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* NS timer registers, when the build switches them */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* MTE system registers, when the build includes them */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore
Achin Gupta9ac63c52014-01-16 12:08:03 +0000408
/* ------------------------------------------------------------------
 * Save the floating point register context. Follows the aapcs_64
 * strictly, using only x9-x17 (temporary caller-saved registers per
 * the AArch64 PCS) as scratch. 'x0' must point to the 'fp_regs'
 * structure that receives the register context.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	/* Store all 32 SIMD&FP registers, two per stp */
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	/* FP status and control registers */
	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	/* AArch32 FP exception control, only with AArch32 support */
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save
Achin Gupta9ac63c52014-01-16 12:08:03 +0000454
/* ------------------------------------------------------------------
 * Restore the floating point register context. Follows the aapcs_64
 * strictly, using only x9-x17 (temporary caller-saved registers per
 * the AArch64 PCS) as scratch. 'x0' must point to the 'fp_regs'
 * structure from which the register context is restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	/* Reload all 32 SIMD&FP registers, two per ldp */
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	/* FP status and control registers */
	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	/* AArch32 FP exception control, only with AArch32 support */
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as the ERET used to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
Yatharth Kochar6c0566c2015-10-02 17:56:48 +0100507
Daniel Boulby928747f2021-05-25 18:09:34 +0100508 /*
Manish Pandey62d532a2022-11-17 15:47:05 +0000509 * Set SCR_EL3.EA bit to enable SErrors at EL3
510 */
511 .macro enable_serror_at_el3
512 mrs x8, scr_el3
513 orr x8, x8, #SCR_EA_BIT
514 msr scr_el3, x8
515 .endm
516
517 /*
Daniel Boulby928747f2021-05-25 18:09:34 +0100518 * Set the PSTATE bits not set when the exception was taken as
519 * described in the AArch64.TakeException() pseudocode function
520 * in ARM DDI 0487F.c page J1-7635 to a default value.
521 */
522 .macro set_unset_pstate_bits
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000523 /*
524 * If Data Independent Timing (DIT) functionality is implemented,
525 * always enable DIT in EL3
526 */
Daniel Boulby928747f2021-05-25 18:09:34 +0100527#if ENABLE_FEAT_DIT
Andre Przywara1f55c412023-01-26 16:47:52 +0000528#if ENABLE_FEAT_DIT == 2
529 mrs x8, id_aa64pfr0_el1
530 and x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
531 cbz x8, 1f
532#endif
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000533 mov x8, #DIT_BIT
534 msr DIT, x8
Andre Przywara1f55c412023-01-26 16:47:52 +00005351:
Daniel Boulby928747f2021-05-25 18:09:34 +0100536#endif /* ENABLE_FEAT_DIT */
537 .endm /* set_unset_pstate_bits */
538
/* ------------------------------------------------------------------
 * Macro that saves all the general purpose registers and, when
 * enabled, the ARMv8.3-PAuth key registers, into the context at
 * 'sp'. It also checks whether the Secure Cycle Counter
 * (PMCCNTR_EL0) is already disabled in EL3/Secure (ARMv8.5-PMU);
 * if it is, PMCR_EL0 need not be saved/restored across a world
 * switch.
 *
 * Ideally only the callee-saved registers would need saving on a
 * world switch, but that implementation is more complex, so for
 * now every register is saved and restored on EL3 entry and exit.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	/* General purpose registers x0-x29 plus SP_EL0 */
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check whether the earlier initialization of
	 * MDCR_EL3.SCCD/MCCD to 1 took effect.
	 *
	 * MDCR_EL3:
	 * MCCD set prohibits the Cycle Counter PMCCNTR_EL0 from
	 * counting at EL3.
	 * SCCD set (Secure Cycle Counter Disable) prohibits
	 * PMCCNTR_EL0 from counting in Secure state.
	 * When neither bit is set, FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 must be saved in the non-secure
	 * context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* ----------------------------------------------------------
	 * Reaching here means the Secure Cycle Counter
	 * (PMCCNTR_EL0) is not prohibited from counting at EL3 or
	 * in Secure state, so PMCR_EL0 has to be saved before the
	 * world switch.
	 * ----------------------------------------------------------
	 */
	mrs	x9, pmcr_el0

	/* Which world did we come from? */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Non-secure caller: preserve its PMCR_EL0 */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counting while event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * The ARMv8.3-PAuth keys are not banked by exception
	 * level, so save them too
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */
639
640/* -----------------------------------------------------------------
Daniel Boulby928747f2021-05-25 18:09:34 +0100641 * This function saves the context and sets the PSTATE to a known
642 * state, preparing entry to el3.
Daniel Boulby95fb1aa2022-01-19 11:20:05 +0000643 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
644 * registers.
Daniel Boulby928747f2021-05-25 18:09:34 +0100645 * Then set any of the PSTATE bits that are not set by hardware
646 * according to the Aarch64.TakeException pseudocode in the Arm
647 * Architecture Reference Manual to a default value for EL3.
648 * clobbers: x17
Daniel Boulby95fb1aa2022-01-19 11:20:05 +0000649 * -----------------------------------------------------------------
650 */
651func prepare_el3_entry
652 save_gp_pmcr_pauth_regs
Manish Pandey62d532a2022-11-17 15:47:05 +0000653 enable_serror_at_el3
Daniel Boulby928747f2021-05-25 18:09:34 +0100654 /*
655 * Set the PSTATE bits not described in the Aarch64.TakeException
656 * pseudocode to their default values.
657 */
658 set_unset_pstate_bits
Yatharth Kochar6c0566c2015-10-02 17:56:48 +0100659 ret
Daniel Boulby95fb1aa2022-01-19 11:20:05 +0000660endfunc prepare_el3_entry
Yatharth Kochar6c0566c2015-10-02 17:56:48 +0100661
/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys from the PAuth context area */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f			/* secure return: skip PMCR_EL0 restore */

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check whether the earlier attempt to set MDCR_EL3.SCCD/MCCD
	 * to 1 failed, meaning that FEAT_PMUv3p5/7 is not implemented
	 * and PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	/* Restore x0-x27 from the GP register context on SP_EL3 */
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	/* x28 is used as scratch for SP_EL0 before being restored itself */
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs
Jeenu Viswambharan23d05a82017-11-29 16:59:34 +0000734
/*
 * In case of ERRATA_SPECULATIVE_AT, save SCTLR_EL1 and TCR_EL1
 * registers and update EL1 registers to disable stage1 and stage2
 * page table walk.
 * clobbers: x29
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * Must follow below order in order to disable page table
	 * walk for lower ELs (EL1 and EL0). First step ensures that
	 * page table walk is disabled for stage1 and second step
	 * ensures that page table walker should use TCR_EL1.EPDx
	 * bits to perform address translation. ISB ensures that CPU
	 * does these 2 steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable page table walk by
	 *    stage1.
	 * 2. Enable MMU bit to avoid identity mapping via stage2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb			/* commit EPDx before enabling the MMU */
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
776
/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * Exits EL3 via ERET to the lower exception level described by the
 * saved SPSR_EL3/ELR_EL3/SCR_EL3 in the context.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb			/* make the CPTR_EL3 write visible first */
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 (encoded; assembler may predate the name) */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f		/* NULL handler => nothing to restore */
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

/*
 * This is a hot path, so we don't want to do some actual FEAT_RAS runtime
 * detection here. The "esb" is a cheaper variant, so using "dsb" in the
 * ENABLE_FEAT_RAS==2 case is not ideal, but won't hurt.
 */
#if IMAGE_BL31 && ENABLE_FEAT_RAS == 1
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && ENABLE_FEAT_RAS == 1 */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	/* Undo save_and_update_ptw_el1_sys_regs (ERRATA_SPECULATIVE_AT) */
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Mark this CPU as no longer executing in EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit