/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#if RAS_EXTENSION
	.global	el2_sysregs_context_save_ras
	.global	el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */
#if CTX_INCLUDE_NEVE_REGS
	.global	el2_sysregs_context_save_nv2
	.global	el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to save/restore EL2 system register context.
 * el2_sysregs_context_save/restore_common functions
 * save and restore registers that are common to all
 * configurations. The rest of the functions save and
 * restore EL2 system registers that are present when a
 * particular feature is enabled. All functions assume
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * where the register context will be saved/restored.
 *
 * The following registers are not saved/restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
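/* -----------------------------------------------------
 * The CTX_*_EL2 offsets used below are the field offsets
 * of the EL2 system register block defined in context.h
 * (included above); each stp/ldp pair therefore accesses
 * two adjacent 64-bit fields of that structure.
 * -----------------------------------------------------
 */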
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common
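/* -----------------------------------------------------
 * Usage sketch (assumed, based on the wider code base):
 * the common routines above and the optional MTE/RAS/NV2
 * helpers below are expected to be called from the C
 * context manager (e.g. cm_el2_sysregs_context_save()),
 * roughly as:
 *
 *   el2_sysregs_t *el2_ctx = get_el2_sysregs_ctx(ctx);
 *   el2_sysregs_context_save_common(el2_ctx);
 * #if CTX_INCLUDE_MTE_REGS
 *   el2_sysregs_context_save_mte(el2_ctx);
 * #endif
 * -----------------------------------------------------
 */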

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */

#if RAS_EXTENSION
func el2_sysregs_context_save_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
	ret
endfunc el2_sysregs_context_save_ras

func el2_sysregs_context_restore_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
	ret
endfunc el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */

#if CTX_INCLUDE_NEVE_REGS
func el2_sysregs_context_save_nv2
	/*
	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
	 */
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
	ret
endfunc el2_sysregs_context_save_nv2

func el2_sysregs_context_restore_nv2
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
	ret
endfunc el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save
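/* ------------------------------------------------------------------
 * Usage sketch (assumed, based on the wider code base): the context
 * manager is expected to call the save/restore pair around a world
 * switch, roughly as:
 *
 *   el1_sysregs_context_save(get_el1_sysregs_ctx(ctx_outgoing));
 *   ...
 *   el1_sysregs_context_restore(get_el1_sysregs_ctx(ctx_incoming));
 *
 * where 'ctx_outgoing'/'ctx_incoming' are hypothetical names for the
 * two cpu_context pointers and get_el1_sysregs_ctx() is the accessor
 * provided by context.h.
 * ------------------------------------------------------------------
 */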

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

	/*
	 * Set SCR_EL3.EA bit to enable SErrors at EL3
	 */
	.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
	.endm

	/*
	 * Set the PSTATE bits not set when the exception was taken as
	 * described in the AArch64.TakeException() pseudocode function
	 * in ARM DDI 0487F.c page J1-7635 to a default value.
	 */
	.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
	mov	x8, #DIT_BIT
	msr	DIT, x8
#endif /* ENABLE_FEAT_DIT */
	.endm /* set_unset_pstate_bits */

/* ------------------------------------------------------------------
 * The following macro is used to save all the general purpose and
 * ARMv8.3-PAuth (if enabled) registers; the matching restore is
 * performed by restore_gp_pmcr_pauth_regs below.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0)
 * is disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry and exit of EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
	.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if earlier initialization of MDCR_EL3.SCCD/MCCD to 1
	 * has failed.
	 *
	 * MDCR_EL3:
	 * MCCD bit set, Prohibits the Cycle Counter PMCCNTR_EL0 from
	 * counting at EL3.
	 * SCCD bit set, Secure Cycle Counter Disable. Prohibits PMCCNTR_EL0
	 * from counting in Secure state.
	 * If these bits are not set, then FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 needs to be saved in the non-secure
	 * context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* ----------------------------------------------------------
	 * If control reaches here, the Secure Cycle Counter
	 * (PMCCNTR_EL0) is not prohibited from counting at EL3 and
	 * in Secure state. Hence, PMCR_EL0 needs to be saved before
	 * the world switch.
	 * ----------------------------------------------------------
	 */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
	.endm	/* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not described in the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if earlier initialization of MDCR_EL3.SCCD/MCCD to 1
	 * failed, meaning that FEAT_PMUv3p5/7 is not implemented and
	 * PMCR_EL0 should be restored from the non-secure context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x0, mdcr_el3
	tst	x0, x1
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In case of ERRATA_SPECULATIVE_AT, save SCTLR_EL1 and TCR_EL1
 * registers and update EL1 registers to disable stage1 and stage2
 * page table walk
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be followed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step ensures that the stage 1 page table walk is disabled,
	 * and the second step ensures that the page table walker uses
	 * the TCR_EL1.EPDx bits to perform address translation. The
	 * ISB ensures that the CPU performs these two steps in order.
	 *
	 * 1. Update TCR_EL1.EPDx bits to disable page table walk by
	 *    stage1.
	 * 2. Enable MMU bit to avoid identity mapping via stage2
	 *    and force TCR_EL1.EPDx to be used by the page table
	 *    walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

/* ------------------------------------------------------------------
 * This routine assumes that the SP_EL3 is pointing to a valid
 * context structure from where the gp regs and other special
 * registers can be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

#if IMAGE_BL31
	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */
	ldp	x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3]
	msr	cptr_el3, x19

	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:
#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#else
	dsb	sy
#endif /* IMAGE_BL31 && RAS_EXTENSION */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

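	/*
	 * When ERRATA_SPECULATIVE_AT is enabled, restore the SCTLR_EL1
	 * and TCR_EL1 values stashed by save_and_update_ptw_el1_sys_regs
	 * above; the macro below is defined outside this file and is
	 * expected to be a no-op when the workaround is disabled.
	 */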
	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit