/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>

#if CTX_INCLUDE_EL2_REGS
        .global el2_sysregs_context_save
        .global el2_sysregs_context_restore
#endif

        .global el1_sysregs_context_save
        .global el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
        .global fpregs_context_save
        .global fpregs_context_restore
#endif
        .global save_gp_pmcr_pauth_regs
        .global restore_gp_pmcr_pauth_regs
        .global el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save the EL2 system register context. It assumes
 * that 'x0' points to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 *      AMEVCNTVOFF0<n>_EL2
 *      AMEVCNTVOFF1<n>_EL2
 *      ICH_AP0R<n>_EL2
 *      ICH_AP1R<n>_EL2
 *      ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

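/* -----------------------------------------------------
 * Illustrative (hypothetical) C-side usage, assuming
 * TF-A's usual accessor for the EL2 sysreg area of a
 * 'cpu_context_t':
 *
 *     el2_sysregs_context_save(get_el2_sysregs_ctx(ctx));
 * -----------------------------------------------------
 */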
func el2_sysregs_context_save
        mrs x9, actlr_el2
        mrs x10, afsr0_el2
        stp x9, x10, [x0, #CTX_ACTLR_EL2]

        mrs x11, afsr1_el2
        mrs x12, amair_el2
        stp x11, x12, [x0, #CTX_AFSR1_EL2]

        mrs x13, cnthctl_el2
        mrs x14, cnthp_ctl_el2
        stp x13, x14, [x0, #CTX_CNTHCTL_EL2]

        mrs x15, cnthp_cval_el2
        mrs x16, cnthp_tval_el2
        stp x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

        mrs x17, cntvoff_el2
        mrs x9, cptr_el2
        stp x17, x9, [x0, #CTX_CNTVOFF_EL2]

        mrs x10, dbgvcr32_el2
        mrs x11, elr_el2
        stp x10, x11, [x0, #CTX_DBGVCR32_EL2]

        mrs x14, esr_el2
        mrs x15, far_el2
        stp x14, x15, [x0, #CTX_ESR_EL2]

        mrs x16, fpexc32_el2
        mrs x17, hacr_el2
        stp x16, x17, [x0, #CTX_FPEXC32_EL2]

        mrs x9, hcr_el2
        mrs x10, hpfar_el2
        stp x9, x10, [x0, #CTX_HCR_EL2]

        mrs x11, hstr_el2
        mrs x12, ICC_SRE_EL2
        stp x11, x12, [x0, #CTX_HSTR_EL2]

        mrs x13, ICH_HCR_EL2
        mrs x14, ICH_VMCR_EL2
        stp x13, x14, [x0, #CTX_ICH_HCR_EL2]

        mrs x15, mair_el2
        mrs x16, mdcr_el2
        stp x15, x16, [x0, #CTX_MAIR_EL2]

        mrs x17, PMSCR_EL2
        mrs x9, sctlr_el2
        stp x17, x9, [x0, #CTX_PMSCR_EL2]

        mrs x10, spsr_el2
        mrs x11, sp_el2
        stp x10, x11, [x0, #CTX_SPSR_EL2]

        mrs x12, tcr_el2
        mrs x13, tpidr_el2
        stp x12, x13, [x0, #CTX_TCR_EL2]

        mrs x14, ttbr0_el2
        mrs x15, vbar_el2
        stp x14, x15, [x0, #CTX_TTBR0_EL2]

        mrs x16, vmpidr_el2
        mrs x17, vpidr_el2
        stp x16, x17, [x0, #CTX_VMPIDR_EL2]

        mrs x9, vtcr_el2
        mrs x10, vttbr_el2
        stp x9, x10, [x0, #CTX_VTCR_EL2]

#if CTX_INCLUDE_MTE_REGS
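        /* FEAT_MTE: EL2 Tag Fault Status Register */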
        mrs x11, TFSR_EL2
        str x11, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
        mrs x9, MPAM2_EL2
        mrs x10, MPAMHCR_EL2
        stp x9, x10, [x0, #CTX_MPAM2_EL2]

        mrs x11, MPAMVPM0_EL2
        mrs x12, MPAMVPM1_EL2
        stp x11, x12, [x0, #CTX_MPAMVPM0_EL2]

        mrs x13, MPAMVPM2_EL2
        mrs x14, MPAMVPM3_EL2
        stp x13, x14, [x0, #CTX_MPAMVPM2_EL2]

        mrs x15, MPAMVPM4_EL2
        mrs x16, MPAMVPM5_EL2
        stp x15, x16, [x0, #CTX_MPAMVPM4_EL2]

        mrs x17, MPAMVPM6_EL2
        mrs x9, MPAMVPM7_EL2
        stp x17, x9, [x0, #CTX_MPAMVPM6_EL2]

        mrs x10, MPAMVPMV_EL2
        str x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
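        /* Armv8.6 additions: fine-grained trap registers (FEAT_FGT) and the physical counter offset (FEAT_ECV) */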
        mrs x11, HAFGRTR_EL2
        mrs x12, HDFGRTR_EL2
        stp x11, x12, [x0, #CTX_HAFGRTR_EL2]

        mrs x13, HDFGWTR_EL2
        mrs x14, HFGITR_EL2
        stp x13, x14, [x0, #CTX_HDFGWTR_EL2]

        mrs x15, HFGRTR_EL2
        mrs x16, HFGWTR_EL2
        stp x15, x16, [x0, #CTX_HFGRTR_EL2]

        mrs x17, CNTPOFF_EL2
        str x17, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
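        /* Armv8.4-era registers: secure EL2 timers, contextidr_el2, sder32_el2, ttbr1_el2 and assorted virtualisation registers */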
        mrs x9, cnthps_ctl_el2
        mrs x10, cnthps_cval_el2
        stp x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

        mrs x11, cnthps_tval_el2
        mrs x12, cnthvs_ctl_el2
        stp x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

        mrs x13, cnthvs_cval_el2
        mrs x14, cnthvs_tval_el2
        stp x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

        mrs x15, cnthv_ctl_el2
        mrs x16, cnthv_cval_el2
        stp x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

        mrs x17, cnthv_tval_el2
        mrs x9, contextidr_el2
        stp x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

        mrs x10, sder32_el2
        str x10, [x0, #CTX_SDER32_EL2]

        mrs x11, ttbr1_el2
        str x11, [x0, #CTX_TTBR1_EL2]

        mrs x12, vdisr_el2
        str x12, [x0, #CTX_VDISR_EL2]

        mrs x13, vncr_el2
        str x13, [x0, #CTX_VNCR_EL2]

        mrs x14, vsesr_el2
        str x14, [x0, #CTX_VSESR_EL2]

        mrs x15, vstcr_el2
        str x15, [x0, #CTX_VSTCR_EL2]

        mrs x16, vsttbr_el2
        str x16, [x0, #CTX_VSTTBR_EL2]

        mrs x17, TRFCR_EL2
        str x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
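        /* Armv8.5 addition: SCXTNUM_EL2 (software context number, FEAT_CSV2_2) */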
        mrs x9, scxtnum_el2
        str x9, [x0, #CTX_SCXTNUM_EL2]
#endif

        ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore the EL2 system register context. It
 * assumes that 'x0' points to an 'el2_sys_regs'
 * structure from where the register context will be
 * restored.
 *
 * The following registers are not restored:
 *      AMEVCNTVOFF0<n>_EL2
 *      AMEVCNTVOFF1<n>_EL2
 *      ICH_AP0R<n>_EL2
 *      ICH_AP1R<n>_EL2
 *      ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

#if ERRATA_SPECULATIVE_AT
        /*
         * Set the TCR_EL2.{EPD0,EPD1} bits (unless HCR_EL2.E2H is set)
         * and clear the SCTLR_EL2.M bit to disable page table walks (PTW).
         */
        mrs x9, hcr_el2
        tst x9, #HCR_E2H_BIT
        bne 1f
        mrs x9, tcr_el2
        orr x9, x9, #TCR_EPD0_BIT
        orr x9, x9, #TCR_EPD1_BIT
        msr tcr_el2, x9
1:      mrs x9, sctlr_el2
        bic x9, x9, #SCTLR_M_BIT
        msr sctlr_el2, x9
        isb
#endif

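        /*
         * Note: under ERRATA_SPECULATIVE_AT, SCTLR_EL2 and TCR_EL2
         * are deliberately restored last in this function, after an
         * ISB, so that no speculative AT instruction can start a page
         * table walk while the EL2 translation regime is only partly
         * restored.
         */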
        ldp x9, x10, [x0, #CTX_ACTLR_EL2]
        msr actlr_el2, x9
        msr afsr0_el2, x10

        ldp x11, x12, [x0, #CTX_AFSR1_EL2]
        msr afsr1_el2, x11
        msr amair_el2, x12

        ldp x13, x14, [x0, #CTX_CNTHCTL_EL2]
        msr cnthctl_el2, x13
        msr cnthp_ctl_el2, x14

        ldp x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
        msr cnthp_cval_el2, x15
        msr cnthp_tval_el2, x16

        ldp x17, x9, [x0, #CTX_CNTVOFF_EL2]
        msr cntvoff_el2, x17
        msr cptr_el2, x9

        ldp x10, x11, [x0, #CTX_DBGVCR32_EL2]
        msr dbgvcr32_el2, x10
        msr elr_el2, x11

        ldp x14, x15, [x0, #CTX_ESR_EL2]
        msr esr_el2, x14
        msr far_el2, x15

        ldp x16, x17, [x0, #CTX_FPEXC32_EL2]
        msr fpexc32_el2, x16
        msr hacr_el2, x17

        ldp x9, x10, [x0, #CTX_HCR_EL2]
        msr hcr_el2, x9
        msr hpfar_el2, x10

        ldp x11, x12, [x0, #CTX_HSTR_EL2]
        msr hstr_el2, x11
        msr ICC_SRE_EL2, x12

        ldp x13, x14, [x0, #CTX_ICH_HCR_EL2]
        msr ICH_HCR_EL2, x13
        msr ICH_VMCR_EL2, x14

        ldp x15, x16, [x0, #CTX_MAIR_EL2]
        msr mair_el2, x15
        msr mdcr_el2, x16

        ldr x17, [x0, #CTX_PMSCR_EL2]
        msr PMSCR_EL2, x17

        ldp x10, x11, [x0, #CTX_SPSR_EL2]
        msr spsr_el2, x10
        msr sp_el2, x11

        ldr x12, [x0, #CTX_TPIDR_EL2]
        msr tpidr_el2, x12

        ldp x14, x15, [x0, #CTX_TTBR0_EL2]
        msr ttbr0_el2, x14
        msr vbar_el2, x15

        ldp x16, x17, [x0, #CTX_VMPIDR_EL2]
        msr vmpidr_el2, x16
        msr vpidr_el2, x17

        ldp x9, x10, [x0, #CTX_VTCR_EL2]
        msr vtcr_el2, x9
        msr vttbr_el2, x10

#if CTX_INCLUDE_MTE_REGS
        ldr x11, [x0, #CTX_TFSR_EL2]
        msr TFSR_EL2, x11
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
        ldp x9, x10, [x0, #CTX_MPAM2_EL2]
        msr MPAM2_EL2, x9
        msr MPAMHCR_EL2, x10

        ldp x11, x12, [x0, #CTX_MPAMVPM0_EL2]
        msr MPAMVPM0_EL2, x11
        msr MPAMVPM1_EL2, x12

        ldp x13, x14, [x0, #CTX_MPAMVPM2_EL2]
        msr MPAMVPM2_EL2, x13
        msr MPAMVPM3_EL2, x14

        ldp x15, x16, [x0, #CTX_MPAMVPM4_EL2]
        msr MPAMVPM4_EL2, x15
        msr MPAMVPM5_EL2, x16

        ldp x17, x9, [x0, #CTX_MPAMVPM6_EL2]
        msr MPAMVPM6_EL2, x17
        msr MPAMVPM7_EL2, x9

        ldr x10, [x0, #CTX_MPAMVPMV_EL2]
        msr MPAMVPMV_EL2, x10
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
        ldp x11, x12, [x0, #CTX_HAFGRTR_EL2]
        msr HAFGRTR_EL2, x11
        msr HDFGRTR_EL2, x12

        ldp x13, x14, [x0, #CTX_HDFGWTR_EL2]
        msr HDFGWTR_EL2, x13
        msr HFGITR_EL2, x14

        ldp x15, x16, [x0, #CTX_HFGRTR_EL2]
        msr HFGRTR_EL2, x15
        msr HFGWTR_EL2, x16

        ldr x17, [x0, #CTX_CNTPOFF_EL2]
        msr CNTPOFF_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
        ldp x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
        msr cnthps_ctl_el2, x9
        msr cnthps_cval_el2, x10

        ldp x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
        msr cnthps_tval_el2, x11
        msr cnthvs_ctl_el2, x12

        ldp x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
        msr cnthvs_cval_el2, x13
        msr cnthvs_tval_el2, x14

        ldp x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
        msr cnthv_ctl_el2, x15
        msr cnthv_cval_el2, x16

        ldp x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
        msr cnthv_tval_el2, x17
        msr contextidr_el2, x9

        ldr x10, [x0, #CTX_SDER32_EL2]
        msr sder32_el2, x10

        ldr x11, [x0, #CTX_TTBR1_EL2]
        msr ttbr1_el2, x11

        ldr x12, [x0, #CTX_VDISR_EL2]
        msr vdisr_el2, x12

        ldr x13, [x0, #CTX_VNCR_EL2]
        msr vncr_el2, x13

        ldr x14, [x0, #CTX_VSESR_EL2]
        msr vsesr_el2, x14

        ldr x15, [x0, #CTX_VSTCR_EL2]
        msr vstcr_el2, x15

        ldr x16, [x0, #CTX_VSTTBR_EL2]
        msr vsttbr_el2, x16

        ldr x17, [x0, #CTX_TRFCR_EL2]
        msr TRFCR_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
        ldr x9, [x0, #CTX_SCXTNUM_EL2]
        msr scxtnum_el2, x9
#endif

#if ERRATA_SPECULATIVE_AT
        /*
         * Make sure all registers are restored successfully before
         * SCTLR_EL2 and TCR_EL2 are restored below.
         */
        isb
#endif

        ldr x9, [x0, #CTX_SCTLR_EL2]
        msr sctlr_el2, x9
        ldr x9, [x0, #CTX_TCR_EL2]
        msr tcr_el2, x9

        ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
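/* ------------------------------------------------------------------
 * Note: registers are saved in pairs with 'stp', which relies on the
 * CTX_* offsets defined in context.h placing each pair in adjacent
 * 8-byte slots (e.g. CTX_ELR_EL1 is assumed to immediately follow
 * CTX_SPSR_EL1).
 * ------------------------------------------------------------------
 */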
func el1_sysregs_context_save

        mrs x9, spsr_el1
        mrs x10, elr_el1
        stp x9, x10, [x0, #CTX_SPSR_EL1]

        mrs x15, sctlr_el1
        mrs x16, actlr_el1
        stp x15, x16, [x0, #CTX_SCTLR_EL1]

        mrs x17, cpacr_el1
        mrs x9, csselr_el1
        stp x17, x9, [x0, #CTX_CPACR_EL1]

        mrs x10, sp_el1
        mrs x11, esr_el1
        stp x10, x11, [x0, #CTX_SP_EL1]

        mrs x12, ttbr0_el1
        mrs x13, ttbr1_el1
        stp x12, x13, [x0, #CTX_TTBR0_EL1]

        mrs x14, mair_el1
        mrs x15, amair_el1
        stp x14, x15, [x0, #CTX_MAIR_EL1]

        mrs x16, tcr_el1
        mrs x17, tpidr_el1
        stp x16, x17, [x0, #CTX_TCR_EL1]

        mrs x9, tpidr_el0
        mrs x10, tpidrro_el0
        stp x9, x10, [x0, #CTX_TPIDR_EL0]

        mrs x13, par_el1
        mrs x14, far_el1
        stp x13, x14, [x0, #CTX_PAR_EL1]

        mrs x15, afsr0_el1
        mrs x16, afsr1_el1
        stp x15, x16, [x0, #CTX_AFSR0_EL1]

        mrs x17, contextidr_el1
        mrs x9, vbar_el1
        stp x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

        /* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
        mrs x11, spsr_abt
        mrs x12, spsr_und
        stp x11, x12, [x0, #CTX_SPSR_ABT]

        mrs x13, spsr_irq
        mrs x14, spsr_fiq
        stp x13, x14, [x0, #CTX_SPSR_IRQ]

        mrs x15, dacr32_el2
        mrs x16, ifsr32_el2
        stp x15, x16, [x0, #CTX_DACR32_EL2]
#endif

        /* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
        mrs x10, cntp_ctl_el0
        mrs x11, cntp_cval_el0
        stp x10, x11, [x0, #CTX_CNTP_CTL_EL0]

        mrs x12, cntv_ctl_el0
        mrs x13, cntv_cval_el0
        stp x12, x13, [x0, #CTX_CNTV_CTL_EL0]

        mrs x14, cntkctl_el1
        str x14, [x0, #CTX_CNTKCTL_EL1]
#endif

        /* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
        mrs x15, TFSRE0_EL1
        mrs x16, TFSR_EL1
        stp x15, x16, [x0, #CTX_TFSRE0_EL1]

        mrs x9, RGSR_EL1
        mrs x10, GCR_EL1
        stp x9, x10, [x0, #CTX_RGSR_EL1]
#endif

        ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

#if ERRATA_SPECULATIVE_AT
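        /*
         * As in the EL2 restore path: set TCR_EL1.{EPD0,EPD1} and
         * clear SCTLR_EL1.M to disable page table walks until
         * SCTLR_EL1 and TCR_EL1 are restored at the end of this
         * function.
         */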
        mrs x9, tcr_el1
        orr x9, x9, #TCR_EPD0_BIT
        orr x9, x9, #TCR_EPD1_BIT
        msr tcr_el1, x9
        mrs x9, sctlr_el1
        bic x9, x9, #SCTLR_M_BIT
        msr sctlr_el1, x9
        isb
#endif

        ldp x9, x10, [x0, #CTX_SPSR_EL1]
        msr spsr_el1, x9
        msr elr_el1, x10

        ldr x16, [x0, #CTX_ACTLR_EL1]
        msr actlr_el1, x16

        ldp x17, x9, [x0, #CTX_CPACR_EL1]
        msr cpacr_el1, x17
        msr csselr_el1, x9

        ldp x10, x11, [x0, #CTX_SP_EL1]
        msr sp_el1, x10
        msr esr_el1, x11

        ldp x12, x13, [x0, #CTX_TTBR0_EL1]
        msr ttbr0_el1, x12
        msr ttbr1_el1, x13

        ldp x14, x15, [x0, #CTX_MAIR_EL1]
        msr mair_el1, x14
        msr amair_el1, x15

        ldr x16, [x0, #CTX_TPIDR_EL1]
        msr tpidr_el1, x16

        ldp x9, x10, [x0, #CTX_TPIDR_EL0]
        msr tpidr_el0, x9
        msr tpidrro_el0, x10

        ldp x13, x14, [x0, #CTX_PAR_EL1]
        msr par_el1, x13
        msr far_el1, x14

        ldp x15, x16, [x0, #CTX_AFSR0_EL1]
        msr afsr0_el1, x15
        msr afsr1_el1, x16

        ldp x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
        msr contextidr_el1, x17
        msr vbar_el1, x9

        /* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
        ldp x11, x12, [x0, #CTX_SPSR_ABT]
        msr spsr_abt, x11
        msr spsr_und, x12

        ldp x13, x14, [x0, #CTX_SPSR_IRQ]
        msr spsr_irq, x13
        msr spsr_fiq, x14

        ldp x15, x16, [x0, #CTX_DACR32_EL2]
        msr dacr32_el2, x15
        msr ifsr32_el2, x16
#endif

        /* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
        ldp x10, x11, [x0, #CTX_CNTP_CTL_EL0]
        msr cntp_ctl_el0, x10
        msr cntp_cval_el0, x11

        ldp x12, x13, [x0, #CTX_CNTV_CTL_EL0]
        msr cntv_ctl_el0, x12
        msr cntv_cval_el0, x13

        ldr x14, [x0, #CTX_CNTKCTL_EL1]
        msr cntkctl_el1, x14
#endif

        /* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
        ldp x11, x12, [x0, #CTX_TFSRE0_EL1]
        msr TFSRE0_EL1, x11
        msr TFSR_EL1, x12

        ldp x13, x14, [x0, #CTX_RGSR_EL1]
        msr RGSR_EL1, x13
        msr GCR_EL1, x14
#endif

#if ERRATA_SPECULATIVE_AT
        /*
         * Make sure all registers are restored successfully before
         * SCTLR_EL1 and TCR_EL1 are restored below.
         */
        isb
#endif

        ldr x9, [x0, #CTX_SCTLR_EL1]
        msr sctlr_el1, x9
        ldr x9, [x0, #CTX_TCR_EL1]
        msr tcr_el1, x9

        /* No explicit ISB required here as ERET covers it */
        ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, the Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
        stp q0, q1, [x0, #CTX_FP_Q0]
        stp q2, q3, [x0, #CTX_FP_Q2]
        stp q4, q5, [x0, #CTX_FP_Q4]
        stp q6, q7, [x0, #CTX_FP_Q6]
        stp q8, q9, [x0, #CTX_FP_Q8]
        stp q10, q11, [x0, #CTX_FP_Q10]
        stp q12, q13, [x0, #CTX_FP_Q12]
        stp q14, q15, [x0, #CTX_FP_Q14]
        stp q16, q17, [x0, #CTX_FP_Q16]
        stp q18, q19, [x0, #CTX_FP_Q18]
        stp q20, q21, [x0, #CTX_FP_Q20]
        stp q22, q23, [x0, #CTX_FP_Q22]
        stp q24, q25, [x0, #CTX_FP_Q24]
        stp q26, q27, [x0, #CTX_FP_Q26]
        stp q28, q29, [x0, #CTX_FP_Q28]
        stp q30, q31, [x0, #CTX_FP_Q30]

        mrs x9, fpsr
        str x9, [x0, #CTX_FP_FPSR]

        mrs x10, fpcr
        str x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
        mrs x11, fpexc32_el2
        str x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
        ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from where the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, the Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
        ldp q0, q1, [x0, #CTX_FP_Q0]
        ldp q2, q3, [x0, #CTX_FP_Q2]
        ldp q4, q5, [x0, #CTX_FP_Q4]
        ldp q6, q7, [x0, #CTX_FP_Q6]
        ldp q8, q9, [x0, #CTX_FP_Q8]
        ldp q10, q11, [x0, #CTX_FP_Q10]
        ldp q12, q13, [x0, #CTX_FP_Q12]
        ldp q14, q15, [x0, #CTX_FP_Q14]
        ldp q16, q17, [x0, #CTX_FP_Q16]
        ldp q18, q19, [x0, #CTX_FP_Q18]
        ldp q20, q21, [x0, #CTX_FP_Q20]
        ldp q22, q23, [x0, #CTX_FP_Q22]
        ldp q24, q25, [x0, #CTX_FP_Q24]
        ldp q26, q27, [x0, #CTX_FP_Q26]
        ldp q28, q29, [x0, #CTX_FP_Q28]
        ldp q30, q31, [x0, #CTX_FP_Q30]

        ldr x9, [x0, #CTX_FP_FPSR]
        msr fpsr, x9

        ldr x10, [x0, #CTX_FP_FPCR]
        msr fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
        ldr x11, [x0, #CTX_FP_FPEXC32_EL2]
        msr fpexc32_el2, x11
#endif
        /*
         * No explicit ISB is required here, as the ERET used to
         * switch to secure EL1 or the non-secure world covers it.
         */

        ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter has been disabled
 * via MDCR_EL3 when ARMv8.5-PMU is implemented; if it has not, the
 * function saves PMCR_EL0 (when called from the Non-secure state)
 * and disables the Cycle Counter.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * These are not macros, to ensure their invocation fits within the
 * 32 instructions per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
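/* ------------------------------------------------------------------
 * A rough C sketch of the PMCR_EL0 handling implemented below,
 * assuming TF-A's usual sysreg accessors (read_mdcr_el3() etc.):
 *
 *     if ((read_mdcr_el3() & MDCR_SCCD_BIT) == 0U) {
 *             uint64_t pmcr = read_pmcr_el0();
 *             if ((read_scr_el3() & SCR_NS_BIT) != 0U)
 *                     ctx->pmcr_el0 = pmcr;           // save NS PMCR_EL0
 *             write_pmcr_el0(pmcr | PMCR_EL0_DP_BIT); // stop cycle counter
 *     }
 * ------------------------------------------------------------------
 */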
func save_gp_pmcr_pauth_regs
        stp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
        stp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
        stp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
        stp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
        stp x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
        stp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
        stp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
        stp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
        stp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
        stp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
        stp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
        stp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
        stp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
        stp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
        stp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
        mrs x18, sp_el0
        str x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

        /* ----------------------------------------------------------
         * Check if the earlier initialization of MDCR_EL3.SCCD to 1
         * failed, meaning that ARMv8.5-PMU is not implemented and
         * PMCR_EL0 should be saved in the non-secure context.
         * ----------------------------------------------------------
         */
        mrs x9, mdcr_el3
        tst x9, #MDCR_SCCD_BIT
        bne 1f

        /* Secure Cycle Counter is not disabled */
        mrs x9, pmcr_el0

        /* Check caller's security state */
        mrs x10, scr_el3
        tst x10, #SCR_NS_BIT
        beq 2f

        /* Save PMCR_EL0 if called from Non-secure state */
        str x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

        /* Disable cycle counter when event counting is prohibited */
2:      orr x9, x9, #PMCR_EL0_DP_BIT
        msr pmcr_el0, x9
        isb
1:
#if CTX_INCLUDE_PAUTH_REGS
        /* ----------------------------------------------------------
         * Save the ARMv8.3-PAuth keys as they are not banked
         * by exception level
         * ----------------------------------------------------------
         */
        add x19, sp, #CTX_PAUTH_REGS_OFFSET

        mrs x20, APIAKeyLo_EL1 /* x21:x20 = APIAKey */
        mrs x21, APIAKeyHi_EL1
        mrs x22, APIBKeyLo_EL1 /* x23:x22 = APIBKey */
        mrs x23, APIBKeyHi_EL1
        mrs x24, APDAKeyLo_EL1 /* x25:x24 = APDAKey */
        mrs x25, APDAKeyHi_EL1
        mrs x26, APDBKeyLo_EL1 /* x27:x26 = APDBKey */
        mrs x27, APDBKeyHi_EL1
        mrs x28, APGAKeyLo_EL1 /* x29:x28 = APGAKey */
        mrs x29, APGAKeyHi_EL1

        stp x20, x21, [x19, #CTX_PACIAKEY_LO]
        stp x22, x23, [x19, #CTX_PACIBKEY_LO]
        stp x24, x25, [x19, #CTX_PACDAKEY_LO]
        stp x26, x27, [x19, #CTX_PACDBKEY_LO]
        stp x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

        ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores the ARMv8.3-PAuth (if enabled) and PMCR_EL0
 * registers, and all general purpose registers except x30, from the
 * CPU context. The x30 register must be explicitly restored by the
 * caller.
 * ------------------------------------------------------------------
 */
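/* ------------------------------------------------------------------
 * Typical call sequence (as used by el3_exit below):
 *
 *     bl  restore_gp_pmcr_pauth_regs
 *     ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 * ------------------------------------------------------------------
 */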
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
        /* Restore the ARMv8.3 PAuth keys */
        add x10, sp, #CTX_PAUTH_REGS_OFFSET

        ldp x0, x1, [x10, #CTX_PACIAKEY_LO] /* x1:x0 = APIAKey */
        ldp x2, x3, [x10, #CTX_PACIBKEY_LO] /* x3:x2 = APIBKey */
        ldp x4, x5, [x10, #CTX_PACDAKEY_LO] /* x5:x4 = APDAKey */
        ldp x6, x7, [x10, #CTX_PACDBKEY_LO] /* x7:x6 = APDBKey */
        ldp x8, x9, [x10, #CTX_PACGAKEY_LO] /* x9:x8 = APGAKey */

        msr APIAKeyLo_EL1, x0
        msr APIAKeyHi_EL1, x1
        msr APIBKeyLo_EL1, x2
        msr APIBKeyHi_EL1, x3
        msr APDAKeyLo_EL1, x4
        msr APDAKeyHi_EL1, x5
        msr APDBKeyLo_EL1, x6
        msr APDBKeyHi_EL1, x7
        msr APGAKeyLo_EL1, x8
        msr APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

        /* ----------------------------------------------------------
         * Restore PMCR_EL0 when returning to the Non-secure state if
         * the Secure Cycle Counter is not disabled in MDCR_EL3 when
         * ARMv8.5-PMU is implemented.
         * ----------------------------------------------------------
         */
        mrs x0, scr_el3
        tst x0, #SCR_NS_BIT
        beq 2f

        /* ----------------------------------------------------------
         * Back to the Non-secure state.
         * Check if the earlier initialization of MDCR_EL3.SCCD to 1
         * failed, meaning that ARMv8.5-PMU is not implemented and
         * PMCR_EL0 should be restored from the non-secure context.
         * ----------------------------------------------------------
         */
        mrs x0, mdcr_el3
        tst x0, #MDCR_SCCD_BIT
        bne 2f
        ldr x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
        msr pmcr_el0, x0
2:
        ldp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
        ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
        ldp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
        ldp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
        ldp x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
        ldp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
        ldp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
        ldp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
        ldp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
        ldp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
        ldp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
        ldp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
        ldp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
        ldp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
        ldr x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
        msr sp_el0, x28
        ldp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
        ret
endfunc restore_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
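/* ------------------------------------------------------------------
 * Note: callers (e.g. the SMC and interrupt handlers) are assumed to
 * have already populated SPSR_EL3, ELR_EL3 and SCR_EL3 in the
 * context before branching here.
 * ------------------------------------------------------------------
 */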
func el3_exit
#if ENABLE_ASSERTIONS
        /* el3_exit assumes SP_EL0 on entry */
        mrs x17, spsel
        cmp x17, #MODE_SP_EL0
        ASM_ASSERT(eq)
#endif

        /* ----------------------------------------------------------
         * Save the current SP_EL0, i.e. the EL3 runtime stack, which
         * will be used for handling the next SMC.
         * Then switch to SP_EL3.
         * ----------------------------------------------------------
         */
        mov x17, sp
        msr spsel, #MODE_SP_ELX
        str x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

        /* ----------------------------------------------------------
         * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
         * ----------------------------------------------------------
         */
        ldr x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
        ldp x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
        msr scr_el3, x18
        msr spsr_el3, x16
        msr elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
        /* ----------------------------------------------------------
         * Restore mitigation state as it was on entry to EL3
         * ----------------------------------------------------------
         */
        ldr x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
        cbz x17, 1f
        blr x17
1:
#endif
        /* ----------------------------------------------------------
         * Restore the general purpose (including x30), PMCR_EL0 and
         * ARMv8.3-PAuth registers.
         * Exit EL3 via ERET to a lower exception level.
         * ----------------------------------------------------------
         */
        bl restore_gp_pmcr_pauth_regs
        ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
        /* ----------------------------------------------------------
         * Issue an Error Synchronization Barrier to synchronize
         * SErrors before exiting EL3. We're running with EAs
         * unmasked, so any synchronized errors would be taken
         * immediately; therefore there is no need to inspect the
         * DISR_EL1 register.
         * ----------------------------------------------------------
         */
        esb
#endif
        exception_return

endfunc el3_exit