blob: 69acc2f9d6d21e873c7318414bcff4154974798a [file] [log] [blame]
Achin Gupta9ac63c52014-01-16 12:08:03 +00001/*
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +00002 * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved.
Achin Gupta9ac63c52014-01-16 12:08:03 +00003 *
dp-armfa3cf0b2017-05-03 09:38:09 +01004 * SPDX-License-Identifier: BSD-3-Clause
Achin Gupta9ac63c52014-01-16 12:08:03 +00005 */
6
Dan Handley2bd4ef22014-04-09 13:14:54 +01007#include <arch.h>
Andrew Thoelke38bde412014-03-18 13:46:55 +00008#include <asm_macros.S>
Jan Dabrosfa015982019-12-02 13:30:03 +01009#include <assert_macros.S>
Dan Handley2bd4ef22014-04-09 13:14:54 +010010#include <context.h>
Manish V Badarkhee07e8082020-07-23 12:43:25 +010011#include <el3_common_macros.S>
Achin Gupta9ac63c52014-01-16 12:08:03 +000012
Max Shvetsovbdf502d2020-02-25 13:56:19 +000013#if CTX_INCLUDE_EL2_REGS
14 .global el2_sysregs_context_save
15 .global el2_sysregs_context_restore
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +000016#endif /* CTX_INCLUDE_EL2_REGS */
Max Shvetsovbdf502d2020-02-25 13:56:19 +000017
Yatharth Kochar6c0566c2015-10-02 17:56:48 +010018 .global el1_sysregs_context_save
19 .global el1_sysregs_context_restore
20#if CTX_INCLUDE_FPREGS
21 .global fpregs_context_save
22 .global fpregs_context_restore
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +000023#endif /* CTX_INCLUDE_FPREGS */
Daniel Boulby95fb1aa2022-01-19 11:20:05 +000024 .global prepare_el3_entry
Alexei Fedorovf41355c2019-09-13 14:11:59 +010025 .global restore_gp_pmcr_pauth_regs
Manish V Badarkhee07e8082020-07-23 12:43:25 +010026 .global save_and_update_ptw_el1_sys_regs
Yatharth Kochar6c0566c2015-10-02 17:56:48 +010027 .global el3_exit
28
Max Shvetsovbdf502d2020-02-25 13:56:19 +000029#if CTX_INCLUDE_EL2_REGS
30
31/* -----------------------------------------------------
32 * The following function strictly follows the AArch64
Max Shvetsovcf784f72021-03-31 19:00:38 +010033 * PCS to use x9-x16 (temporary caller-saved registers)
Max Shvetsovc9e2c922020-02-17 16:15:47 +000034 * to save EL2 system register context. It assumes that
35 * 'x0' is pointing to a 'el2_sys_regs' structure where
Max Shvetsovbdf502d2020-02-25 13:56:19 +000036 * the register context will be saved.
Max Shvetsovc9e2c922020-02-17 16:15:47 +000037 *
38 * The following registers are not added.
39 * AMEVCNTVOFF0<n>_EL2
40 * AMEVCNTVOFF1<n>_EL2
41 * ICH_AP0R<n>_EL2
42 * ICH_AP1R<n>_EL2
43 * ICH_LR<n>_EL2
Max Shvetsovbdf502d2020-02-25 13:56:19 +000044 * -----------------------------------------------------
45 */
Max Shvetsovc9e2c922020-02-17 16:15:47 +000046func el2_sysregs_context_save
Max Shvetsovbdf502d2020-02-25 13:56:19 +000047 mrs x9, actlr_el2
Max Shvetsovc9e2c922020-02-17 16:15:47 +000048 mrs x10, afsr0_el2
49 stp x9, x10, [x0, #CTX_ACTLR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +000050
Max Shvetsovc9e2c922020-02-17 16:15:47 +000051 mrs x11, afsr1_el2
52 mrs x12, amair_el2
53 stp x11, x12, [x0, #CTX_AFSR1_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +000054
Max Shvetsovc9e2c922020-02-17 16:15:47 +000055 mrs x13, cnthctl_el2
Max Shvetsovcf784f72021-03-31 19:00:38 +010056 mrs x14, cntvoff_el2
Max Shvetsovc9e2c922020-02-17 16:15:47 +000057 stp x13, x14, [x0, #CTX_CNTHCTL_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +000058
Max Shvetsovcf784f72021-03-31 19:00:38 +010059 mrs x15, cptr_el2
60 str x15, [x0, #CTX_CPTR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +000061
Arunachalam Ganapathydca591b2020-05-26 11:32:35 +010062#if CTX_INCLUDE_AARCH32_REGS
Max Shvetsovcf784f72021-03-31 19:00:38 +010063 mrs x16, dbgvcr32_el2
64 str x16, [x0, #CTX_DBGVCR32_EL2]
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +000065#endif /* CTX_INCLUDE_AARCH32_REGS */
Max Shvetsovbdf502d2020-02-25 13:56:19 +000066
Max Shvetsovcf784f72021-03-31 19:00:38 +010067 mrs x9, elr_el2
68 mrs x10, esr_el2
69 stp x9, x10, [x0, #CTX_ELR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +000070
Max Shvetsovcf784f72021-03-31 19:00:38 +010071 mrs x11, far_el2
72 mrs x12, hacr_el2
73 stp x11, x12, [x0, #CTX_FAR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +000074
Max Shvetsovcf784f72021-03-31 19:00:38 +010075 mrs x13, hcr_el2
76 mrs x14, hpfar_el2
77 stp x13, x14, [x0, #CTX_HCR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +000078
Max Shvetsovcf784f72021-03-31 19:00:38 +010079 mrs x15, hstr_el2
80 mrs x16, ICC_SRE_EL2
81 stp x15, x16, [x0, #CTX_HSTR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +000082
Max Shvetsovcf784f72021-03-31 19:00:38 +010083 mrs x9, ICH_HCR_EL2
84 mrs x10, ICH_VMCR_EL2
85 stp x9, x10, [x0, #CTX_ICH_HCR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +000086
Max Shvetsovcf784f72021-03-31 19:00:38 +010087 mrs x11, mair_el2
88 mrs x12, mdcr_el2
89 stp x11, x12, [x0, #CTX_MAIR_EL2]
90
Arunachalam Ganapathy04b7e432020-10-09 14:51:41 +010091#if ENABLE_SPE_FOR_LOWER_ELS
Max Shvetsovcf784f72021-03-31 19:00:38 +010092 mrs x13, PMSCR_EL2
93 str x13, [x0, #CTX_PMSCR_EL2]
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +000094#endif /* ENABLE_SPE_FOR_LOWER_ELS */
95
Max Shvetsovcf784f72021-03-31 19:00:38 +010096 mrs x14, sctlr_el2
97 str x14, [x0, #CTX_SCTLR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +000098
Max Shvetsovcf784f72021-03-31 19:00:38 +010099 mrs x15, spsr_el2
100 mrs x16, sp_el2
101 stp x15, x16, [x0, #CTX_SPSR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000102
Max Shvetsovcf784f72021-03-31 19:00:38 +0100103 mrs x9, tcr_el2
104 mrs x10, tpidr_el2
105 stp x9, x10, [x0, #CTX_TCR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000106
Max Shvetsovcf784f72021-03-31 19:00:38 +0100107 mrs x11, ttbr0_el2
108 mrs x12, vbar_el2
109 stp x11, x12, [x0, #CTX_TTBR0_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000110
Max Shvetsovcf784f72021-03-31 19:00:38 +0100111 mrs x13, vmpidr_el2
112 mrs x14, vpidr_el2
113 stp x13, x14, [x0, #CTX_VMPIDR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000114
Max Shvetsovcf784f72021-03-31 19:00:38 +0100115 mrs x15, vtcr_el2
116 mrs x16, vttbr_el2
117 stp x15, x16, [x0, #CTX_VTCR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000118
Max Shvetsovc9e2c922020-02-17 16:15:47 +0000119#if CTX_INCLUDE_MTE_REGS
Max Shvetsovcf784f72021-03-31 19:00:38 +0100120 mrs x9, TFSR_EL2
121 str x9, [x0, #CTX_TFSR_EL2]
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000122#endif /* CTX_INCLUDE_MTE_REGS */
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000123
Max Shvetsovc9e2c922020-02-17 16:15:47 +0000124#if ENABLE_MPAM_FOR_LOWER_ELS
Max Shvetsovcf784f72021-03-31 19:00:38 +0100125 mrs x10, MPAM2_EL2
126 str x10, [x0, #CTX_MPAM2_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000127
Max Shvetsovcf784f72021-03-31 19:00:38 +0100128 mrs x11, MPAMHCR_EL2
129 mrs x12, MPAMVPM0_EL2
130 stp x11, x12, [x0, #CTX_MPAMHCR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000131
Max Shvetsovcf784f72021-03-31 19:00:38 +0100132 mrs x13, MPAMVPM1_EL2
133 mrs x14, MPAMVPM2_EL2
134 stp x13, x14, [x0, #CTX_MPAMVPM1_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000135
Max Shvetsovcf784f72021-03-31 19:00:38 +0100136 mrs x15, MPAMVPM3_EL2
137 mrs x16, MPAMVPM4_EL2
138 stp x15, x16, [x0, #CTX_MPAMVPM3_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000139
Max Shvetsovcf784f72021-03-31 19:00:38 +0100140 mrs x9, MPAMVPM5_EL2
141 mrs x10, MPAMVPM6_EL2
142 stp x9, x10, [x0, #CTX_MPAMVPM5_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000143
Max Shvetsovcf784f72021-03-31 19:00:38 +0100144 mrs x11, MPAMVPM7_EL2
145 mrs x12, MPAMVPMV_EL2
146 stp x11, x12, [x0, #CTX_MPAMVPM7_EL2]
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000147#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000148
Jayanth Dodderi Chidanand13ae0f42021-11-25 14:59:30 +0000149#if ENABLE_FEAT_FGT
150 mrs x13, HDFGRTR_EL2
151#if ENABLE_FEAT_AMUv1
152 mrs x14, HAFGRTR_EL2
153 stp x13, x14, [x0, #CTX_HDFGRTR_EL2]
154#else
155 str x13, [x0, #CTX_HDFGRTR_EL2]
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000156#endif /* ENABLE_FEAT_AMUv1 */
Max Shvetsovcf784f72021-03-31 19:00:38 +0100157 mrs x15, HDFGWTR_EL2
158 mrs x16, HFGITR_EL2
159 stp x15, x16, [x0, #CTX_HDFGWTR_EL2]
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000160
Max Shvetsovcf784f72021-03-31 19:00:38 +0100161 mrs x9, HFGRTR_EL2
162 mrs x10, HFGWTR_EL2
163 stp x9, x10, [x0, #CTX_HFGRTR_EL2]
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000164#endif /* ENABLE_FEAT_FGT */
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000165
Jayanth Dodderi Chidanand13ae0f42021-11-25 14:59:30 +0000166#if ENABLE_FEAT_ECV
Max Shvetsovcf784f72021-03-31 19:00:38 +0100167 mrs x11, CNTPOFF_EL2
168 str x11, [x0, #CTX_CNTPOFF_EL2]
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000169#endif /* ENABLE_FEAT_ECV */
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000170
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000171#if ENABLE_FEAT_VHE
172 /*
173 * CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
174 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
175 */
176 mrs x9, contextidr_el2
177 mrs x10, ttbr1_el2
178 stp x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
179#endif /* ENABLE_FEAT_VHE */
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000180
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000181#if RAS_EXTENSION
182 /*
183 * VDISR_EL2 and VSESR_EL2 registers are saved only when
184 * FEAT_RAS is supported.
185 */
186 mrs x11, vdisr_el2
187 mrs x12, vsesr_el2
188 stp x11, x12, [x0, #CTX_VDISR_EL2]
189#endif /* RAS_EXTENSION */
190
Arunachalam Ganapathydd3ec7e2020-05-28 11:57:09 +0100191#if CTX_INCLUDE_NEVE_REGS
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000192 /*
193 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
194 */
Max Shvetsovcf784f72021-03-31 19:00:38 +0100195 mrs x16, vncr_el2
196 str x16, [x0, #CTX_VNCR_EL2]
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000197#endif /* CTX_INCLUDE_NEVE_REGS */
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000198
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000199#if ENABLE_TRF_FOR_NS
200 /*
201 * TRFCR_EL2 register is saved only when FEAT_TRF is supported.
202 */
Max Shvetsovcf784f72021-03-31 19:00:38 +0100203 mrs x12, TRFCR_EL2
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000204 str x12, [x0, #CTX_TRFCR_EL2]
205#endif /* ENABLE_TRF_FOR_NS */
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000206
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000207#if ENABLE_FEAT_CSV2_2
208 /*
209 * SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
210 */
Max Shvetsovcf784f72021-03-31 19:00:38 +0100211 mrs x13, scxtnum_el2
212 str x13, [x0, #CTX_SCXTNUM_EL2]
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000213#endif /* ENABLE_FEAT_CSV2_2 */
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000214
johpow01f91e59f2021-08-04 19:38:18 -0500215#if ENABLE_FEAT_HCX
216 mrs x14, hcrx_el2
217 str x14, [x0, #CTX_HCRX_EL2]
Jayanth Dodderi Chidanand72b69b82022-01-26 17:14:43 +0000218#endif /* ENABLE_FEAT_HCX */
johpow01f91e59f2021-08-04 19:38:18 -0500219
Max Shvetsovbdf502d2020-02-25 13:56:19 +0000220 ret
221endfunc el2_sysregs_context_save
222
Max Shvetsovcf784f72021-03-31 19:00:38 +0100223
/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to a 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * Clobbers: x9 - x16. The restore order mirrors
 * el2_sysregs_context_save so the two stay in lockstep
 * with the CTX_* offsets in context.h.
 *
 * The following registers are not restored
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	/* DBGVCR32_EL2 only exists when lower ELs can run AArch32 */
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

#if ENABLE_SPE_FOR_LOWER_ELS
	/* Statistical Profiling Extension control for EL2 */
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
	/* Memory Tagging Extension fault status for EL2 */
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_MPAM_FOR_LOWER_ELS
	/* Memory Partitioning and Monitoring (MPAM) EL2 registers */
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x13
	msr	MPAMVPM2_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x15
	msr	MPAMVPM4_EL2, x16

	ldp	x9, x10, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x9
	msr	MPAMVPM6_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x11
	msr	MPAMVPMV_EL2, x12
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_FGT
	/*
	 * Fine-grained trap registers. HAFGRTR_EL2 additionally
	 * requires FEAT_AMUv1, matching the conditional layout
	 * used on the save path.
	 */
#if ENABLE_FEAT_AMUv1
	ldp	x13, x14, [x0, #CTX_HDFGRTR_EL2]
	msr	HAFGRTR_EL2, x14
#else
	ldr	x13, [x0, #CTX_HDFGRTR_EL2]
#endif /* ENABLE_FEAT_AMUv1 */
	msr	HDFGRTR_EL2, x13

	ldp	x15, x16, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x15
	msr	HFGITR_EL2, x16

	ldp	x9, x10, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x9
	msr	HFGWTR_EL2, x10
#endif /* ENABLE_FEAT_FGT */

#if ENABLE_FEAT_ECV
	/* Physical counter offset, present with FEAT_ECV */
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
	/*
	 * CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	ldp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x9
	msr	ttbr1_el2, x10
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
#endif /* RAS_EXTENSION */

#if CTX_INCLUDE_NEVE_REGS
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
	/*
	 * TRFCR_EL2 register is restored only when FEAT_TRF is supported.
	 */
	ldr	x12, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x12
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
	/*
	 * SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
	 */
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
#endif /* ENABLE_FEAT_CSV2_2 */

#if ENABLE_FEAT_HCX
	/* Extended hypervisor configuration, present with FEAT_HCX */
	ldr	x14, [x0, #CTX_HCRX_EL2]
	msr	hcrx_el2, x14
#endif /* ENABLE_FEAT_HCX */

	ret
endfunc el2_sysregs_context_restore
416
417#endif /* CTX_INCLUDE_EL2_REGS */
418
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to a
 * 'el1_sys_regs' structure where the register context will be saved.
 *
 * Clobbers: x9 - x17; x0 is preserved for the caller.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	/*
	 * NOTE(review): when ERRATA_SPECULATIVE_AT is enabled,
	 * SCTLR_EL1/TCR_EL1 are presumably handled on a separate
	 * path (not visible in this chunk) — confirm against the
	 * errata workaround code before relying on this.
	 */
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save
Achin Gupta9ac63c52014-01-16 12:08:03 +0000516
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to a
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 *
 * Clobbers: x9 - x17. The restore order mirrors
 * el1_sysregs_context_save and the CTX_* offsets in context.h.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	/*
	 * NOTE(review): with ERRATA_SPECULATIVE_AT enabled,
	 * SCTLR_EL1/TCR_EL1 are presumably restored on a separate
	 * path (not visible in this chunk) — confirm against the
	 * errata workaround code.
	 */
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore
Achin Gupta9ac63c52014-01-16 12:08:03 +0000616
/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	/* Save all 32 SIMD/FP vector registers, pairwise */
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	/* FP status and control registers */
	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	/* FPEXC32_EL2 only exists when lower ELs can run AArch32 */
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save
Achin Gupta9ac63c52014-01-16 12:08:03 +0000662
/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	/* Restore all 32 SIMD/FP vector registers, pairwise */
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	/* FP status and control registers */
	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	/* FPEXC32_EL2 only exists when lower ELs can run AArch32 */
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
Juan Castillo258e94f2014-06-25 17:26:36 +0100714#endif /* CTX_INCLUDE_FPREGS */
Yatharth Kochar6c0566c2015-10-02 17:56:48 +0100715
	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 *
	 * Clobbers: x8 (only when ENABLE_FEAT_DIT is set)
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3 so that timing of EL3 operations does
	 * not depend on the data being processed.
	 */
#if ENABLE_FEAT_DIT
	mov	x8, #DIT_BIT
	msr	DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */
731
/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if Secure Cycle Counter is not disabled in MDCR_EL3
 * when ARMv8.5-PMU is implemented, and if called from Non-secure
 * state saves PMCR_EL0 and disables Cycle Counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * NOTE(review): x9 and x10 (and x19-x29 when CTX_INCLUDE_PAUTH_REGS)
 * are also modified below, but only after their original values have
 * been saved into the context area, so the interrupted state remains
 * recoverable from the context.
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	/* Save x0-x29 into the 'gp_regs' area of the context at SP_EL3 */
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	/* SP_EL0 is part of the interrupted context; save it too */
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if earlier initialization MDCR_EL3.SCCD/MCCD to 1
	 * failed, meaning that FEAT_PMUv3p5/7 is not implemented and
	 * PMCR_EL0 should be saved in non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm /* save_gp_pmcr_pauth_regs */
818
819/* -----------------------------------------------------------------
Daniel Boulby928747f2021-05-25 18:09:34 +0100820 * This function saves the context and sets the PSTATE to a known
821 * state, preparing entry to el3.
Daniel Boulby95fb1aa2022-01-19 11:20:05 +0000822 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
823 * registers.
Daniel Boulby928747f2021-05-25 18:09:34 +0100824 * Then set any of the PSTATE bits that are not set by hardware
825 * according to the Aarch64.TakeException pseudocode in the Arm
826 * Architecture Reference Manual to a default value for EL3.
827 * clobbers: x17
Daniel Boulby95fb1aa2022-01-19 11:20:05 +0000828 * -----------------------------------------------------------------
829 */
830func prepare_el3_entry
831 save_gp_pmcr_pauth_regs
Daniel Boulby928747f2021-05-25 18:09:34 +0100832 /*
833 * Set the PSTATE bits not described in the Aarch64.TakeException
834 * pseudocode to their default values.
835 */
836 set_unset_pstate_bits
Yatharth Kochar6c0566c2015-10-02 17:56:48 +0100837 ret
Daniel Boulby95fb1aa2022-01-19 11:20:05 +0000838endfunc prepare_el3_entry
Yatharth Kochar6c0566c2015-10-02 17:56:48 +0100839
/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if earlier initialization MDCR_EL3.SCCD/MCCD to 1
	 * failed, meaning that FEAT_PMUv3p5/7 is not implemented and
	 * PMCR_EL0 should be restored from non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	/* Restore x0-x27 from the 'gp_regs' area of the context */
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	/*
	 * Use x28 as scratch to restore SP_EL0 first, then reload the
	 * real x28/x29 values; x28 must therefore be restored last.
	 */
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs
Jeenu Viswambharan23d05a82017-11-29 16:59:34 +0000912
/*
 * In case of ERRATA_SPECULATIVE_AT, save SCTLR_EL1 and TCR_EL1
 * registers and update EL1 registers to disable stage1 and stage2
 * page table walk.
 * Uses x29 as scratch (its original value is assumed to have been
 * saved to the context already by the caller's entry path).
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * Must follow below order in order to disable page table
	 * walk for lower ELs (EL1 and EL0). First step ensures that
	 * page table walk is disabled for stage1 and second step
	 * ensures that page table walker should use TCR_EL1.EPDx
	 * bits to perform address translation. ISB ensures that CPU
	 * does these 2 steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable page table walk by
	 *    stage1.
	 * 2. Enable MMU bit to avoid identity mapping via stage2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb			/* Commit EPDx before enabling the MMU */
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb			/* Ensure both steps take effect in order */

	ret
endfunc save_and_update_ptw_el1_sys_regs
954
/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * It restores the saved context and returns to the lower exception
 * level via ERET (through the 'exception_return' macro).
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	/* Skip the ZCR restore when CPTR_EL3.EZ is clear */
	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb			/* Make the CPTR_EL3 write visible before accessing ZCR */
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* Undo the ERRATA_SPECULATIVE_AT changes made on entry, if any */
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

#ifdef IMAGE_BL31
	/* Mark this CPU as no longer executing in EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit