/*
 * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS
/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS and uses x9-x17 (temporary caller-saved registers)
 * to save the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
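/* -----------------------------------------------------
 * Illustrative usage only (not part of this file): the
 * C context-management code is expected to call these
 * helpers with a pointer to the EL2 sysreg area of a
 * 'cpu_context_t', e.g. (assuming the accessors in
 * context.h):
 *
 *   el2_sysregs_t *el2_ctx = get_el2_sysregs_ctx(ctx);
 *   el2_sysregs_context_save(el2_ctx);
 *   ...
 *   el2_sysregs_context_restore(el2_ctx);
 * -----------------------------------------------------
 */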

func el2_sysregs_context_save
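	/* Registers are captured in pairs: each 'stp' below writes two
	 * adjacent fields of the 'el2_sys_regs' structure, so the CTX_*_EL2
	 * offset names the first register of the pair (see context.h). */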
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x11, elr_el2
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, dbgvcr32_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
#else
	str	x11, [x0, #CTX_ELR_EL2]
#endif

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, hacr_el2
	mrs	x17, hcr_el2
	stp	x16, x17, [x0, #CTX_HACR_EL2]

	mrs	x9, hpfar_el2
	mrs	x10, hstr_el2
	stp	x9, x10, [x0, #CTX_HPFAR_EL2]

	mrs	x11, ICC_SRE_EL2
	mrs	x12, ICH_HCR_EL2
	stp	x11, x12, [x0, #CTX_ICC_SRE_EL2]

	mrs	x13, ICH_VMCR_EL2
	mrs	x14, mair_el2
	stp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x15, mdcr_el2
#if ENABLE_SPE_FOR_LOWER_ELS
	mrs	x16, PMSCR_EL2
	stp	x15, x16, [x0, #CTX_MDCR_EL2]
#else
	str	x15, [x0, #CTX_MDCR_EL2]
#endif

	mrs	x17, sctlr_el2
	mrs	x9, spsr_el2
	stp	x17, x9, [x0, #CTX_SCTLR_EL2]

	mrs	x10, sp_el2
	mrs	x11, tcr_el2
	stp	x10, x11, [x0, #CTX_SP_EL2]

	mrs	x12, tpidr_el2
	mrs	x13, ttbr0_el2
	stp	x12, x13, [x0, #CTX_TPIDR_EL2]

	mrs	x14, vbar_el2
	mrs	x15, vmpidr_el2
	stp	x14, x15, [x0, #CTX_VBAR_EL2]

	mrs	x16, vpidr_el2
	mrs	x17, vtcr_el2
	stp	x16, x17, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x10, TFSR_EL2
	str	x10, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]
#endif

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

#if CTX_INCLUDE_NEVE_REGS
	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]
#endif

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS and uses x9-x17 (temporary caller-saved registers)
 * to restore the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure from
 * which the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore
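	/* The loads below mirror el2_sysregs_context_save: each 'ldp' reads
	 * back a pair of fields written by the save path, using the same
	 * CTX_*_EL2 offsets from context.h. */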

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

#if CTX_INCLUDE_AARCH32_REGS
	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
#else
	ldr	x11, [x0, #CTX_ELR_EL2]
#endif
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x16
	msr	hcr_el2, x17

	ldp	x9, x10, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9
	msr	hstr_el2, x10

	ldp	x11, x12, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x11
	msr	ICH_HCR_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x13
	msr	mair_el2, x14

#if ENABLE_SPE_FOR_LOWER_ELS
	ldp	x15, x16, [x0, #CTX_MDCR_EL2]
	msr	PMSCR_EL2, x16
#else
	ldr	x15, [x0, #CTX_MDCR_EL2]
#endif
	msr	mdcr_el2, x15

	ldp	x17, x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x17
	msr	spsr_el2, x9

	ldp	x10, x11, [x0, #CTX_SP_EL2]
	msr	sp_el2, x10
	msr	tcr_el2, x11

	ldp	x12, x13, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x12
	msr	ttbr0_el2, x13

	ldp	x13, x14, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x13
	msr	vmpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x15
	msr	vtcr_el2, x16

	ldr	x17, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x17

#if CTX_INCLUDE_MTE_REGS
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x10, x11, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10
	msr	MPAMHCR_EL2, x11

	ldp	x12, x13, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x12
	msr	MPAMVPM1_EL2, x13

	ldp	x14, x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x14
	msr	MPAMVPM3_EL2, x15

	ldp	x16, x17, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x16
	msr	MPAMVPM5_EL2, x17

	ldp	x9, x10, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x9
	msr	MPAMVPM7_EL2, x10

	ldr	x11, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x11
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x12, x13, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x12
	msr	HDFGRTR_EL2, x13

	ldp	x14, x15, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x14
	msr	HFGITR_EL2, x15

	ldp	x16, x17, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x16
	msr	HFGWTR_EL2, x17

	ldr	x9, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x10, x11, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x10
	msr	cnthps_cval_el2, x11

	ldp	x12, x13, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x12
	msr	cnthvs_ctl_el2, x13

	ldp	x14, x15, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x14
	msr	cnthvs_tval_el2, x15

	ldp	x16, x17, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x16
	msr	cnthv_cval_el2, x17

	ldp	x9, x10, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x9
	msr	contextidr_el2, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x11
#endif

	ldr	x12, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x12

	ldr	x13, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x13

#if CTX_INCLUDE_NEVE_REGS
	ldr	x14, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x14
#endif

	ldr	x15, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x15

	ldr	x16, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x16

	ldr	x17, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x17

	ldr	x9, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x9
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x10, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x10
#endif

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS and uses
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
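/* ------------------------------------------------------------------
 * Illustrative usage only (not part of this file), assuming the
 * accessors in context.h:
 *
 *   el1_sysregs_context_save(get_el1_sysregs_ctx(ctx));
 *   ...
 *   el1_sysregs_context_restore(get_el1_sysregs_ctx(ctx));
 * ------------------------------------------------------------------
 */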
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif
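	/* When ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1 are
	 * instead saved by save_and_update_ptw_el1_sys_regs, later in
	 * this file. */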

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build is configured to do so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS and uses
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from which the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif
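	/* When ERRATA_SPECULATIVE_AT is enabled, SCTLR_EL1 and TCR_EL1 are
	 * instead restored by the restore_ptw_el1_sys_regs macro invoked
	 * from el3_exit. */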

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build is configured to do so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS and uses
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP
 * registers nor sets this trap, so TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
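	/* Each Qn register is 128 bits wide, so every 'stp' below stores
	 * 32 bytes and the CTX_FP_Qn offsets advance in steps of two
	 * Q registers. */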
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS and uses
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from which the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP
 * registers nor sets this trap, so TFP is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as the ERET used
	 * to switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter is disabled in
 * MDCR_EL3 when ARMv8.5-PMU is implemented; if called from Non-secure
 * state, it saves PMCR_EL0 and disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * These are not macros, to ensure their invocation fits within the 32
 * instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
func save_gp_pmcr_pauth_regs
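	/* SP_EL3 points to the 'cpu_context_t' of the current world (see
	 * el3_exit below), so the GP registers are stored relative to
	 * 'sp'. */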
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* The Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check the caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable the cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled), PMCR_EL0 and
 * all general purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
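	/* The PAuth keys are restored first, while x0-x9 are still free as
	 * scratch registers; the GP registers themselves are loaded last so
	 * that nothing afterwards clobbers them. */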
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage 1 and
 * stage 2 page table walks.
 */
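/* Note (not in the original comment): the counterpart restore is
 * performed by the restore_ptw_el1_sys_regs macro invoked from
 * el3_exit below, which is expected to write the values saved here
 * back to SCTLR_EL1 and TCR_EL1 just before the ERET. */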
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The two steps below must be performed in this order to
	 * disable the page table walk for the lower ELs (EL1 and EL0).
	 * The first step disables the stage 1 page table walk; the
	 * second forces the page table walker to use the TCR_EL1.EPDx
	 * bits when performing address translation. The ISB ensures
	 * that the CPU carries out the two steps in order.
	 *
	 * 1. Set the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Set the MMU enable bit to avoid an identity mapping via
	 *    stage 2 and to force the TCR_EL1.EPDx bits to be used by
	 *    the page table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 points to a valid context
 * structure from which the GP regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect the DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
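	/* ----------------------------------------------------------
	 * Without RAS_EXTENSION there is no ESB; use a full DSB so
	 * that all outstanding memory accesses complete before the
	 * exception return.
	 * ----------------------------------------------------------
	 */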
	dsb	sy
#endif
#ifdef IMAGE_BL31
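	/* Clear the flag that marks this CPU as executing in EL3 */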
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif
	exception_return

endfunc el3_exit