/*
 * Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save
	.global	el2_sysregs_context_restore
#endif

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif
	.global	save_gp_pmcr_pauth_regs
	.global	restore_gp_pmcr_pauth_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to save EL2 system register context. It assumes that
 * 'x0' is pointing to an 'el2_sys_regs' structure where
 * the register context will be saved.
 *
 * The following registers are not saved:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */

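/*
 * A sketch of typical usage from C, assuming the context management
 * helpers of the surrounding firmware (the 'get_el2_sysregs_ctx'
 * accessor and 'ctx' pointer below are assumptions, shown only for
 * illustration):
 *
 *	el2_sysregs_context_save(get_el2_sysregs_ctx(ctx));
 *	...
 *	el2_sysregs_context_restore(get_el2_sysregs_ctx(ctx));
 */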
func el2_sysregs_context_save
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cnthp_ctl_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cnthp_cval_el2
	mrs	x16, cnthp_tval_el2
	stp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]

	mrs	x17, cntvoff_el2
	mrs	x9, cptr_el2
	stp	x17, x9, [x0, #CTX_CNTVOFF_EL2]

	mrs	x10, dbgvcr32_el2
	mrs	x11, elr_el2
	stp	x10, x11, [x0, #CTX_DBGVCR32_EL2]

	mrs	x14, esr_el2
	mrs	x15, far_el2
	stp	x14, x15, [x0, #CTX_ESR_EL2]

	mrs	x16, hacr_el2
	mrs	x17, hcr_el2
	stp	x16, x17, [x0, #CTX_HACR_EL2]

	mrs	x9, hpfar_el2
	mrs	x10, hstr_el2
	stp	x9, x10, [x0, #CTX_HPFAR_EL2]

	mrs	x11, ICC_SRE_EL2
	mrs	x12, ICH_HCR_EL2
	stp	x11, x12, [x0, #CTX_ICC_SRE_EL2]

	mrs	x13, ICH_VMCR_EL2
	mrs	x14, mair_el2
	stp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]

	mrs	x15, mdcr_el2
	mrs	x16, PMSCR_EL2
	stp	x15, x16, [x0, #CTX_MDCR_EL2]

	mrs	x17, sctlr_el2
	mrs	x9, spsr_el2
	stp	x17, x9, [x0, #CTX_SCTLR_EL2]

	mrs	x10, sp_el2
	mrs	x11, tcr_el2
	stp	x10, x11, [x0, #CTX_SP_EL2]

	mrs	x12, tpidr_el2
	mrs	x13, ttbr0_el2
	stp	x12, x13, [x0, #CTX_TPIDR_EL2]

	mrs	x14, vbar_el2
	mrs	x15, vmpidr_el2
	stp	x14, x15, [x0, #CTX_VBAR_EL2]

	mrs	x16, vpidr_el2
	mrs	x17, vtcr_el2
	stp	x16, x17, [x0, #CTX_VPIDR_EL2]

	mrs	x9, vttbr_el2
	str	x9, [x0, #CTX_VTTBR_EL2]

#if CTX_INCLUDE_MTE_REGS
	mrs	x10, TFSR_EL2
	str	x10, [x0, #CTX_TFSR_EL2]
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	mrs	x9, MPAM2_EL2
	mrs	x10, MPAMHCR_EL2
	stp	x9, x10, [x0, #CTX_MPAM2_EL2]

	mrs	x11, MPAMVPM0_EL2
	mrs	x12, MPAMVPM1_EL2
	stp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]

	mrs	x13, MPAMVPM2_EL2
	mrs	x14, MPAMVPM3_EL2
	stp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]

	mrs	x15, MPAMVPM4_EL2
	mrs	x16, MPAMVPM5_EL2
	stp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]

	mrs	x17, MPAMVPM6_EL2
	mrs	x9, MPAMVPM7_EL2
	stp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]

	mrs	x10, MPAMVPMV_EL2
	str	x10, [x0, #CTX_MPAMVPMV_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	mrs	x11, HAFGRTR_EL2
	mrs	x12, HDFGRTR_EL2
	stp	x11, x12, [x0, #CTX_HAFGRTR_EL2]

	mrs	x13, HDFGWTR_EL2
	mrs	x14, HFGITR_EL2
	stp	x13, x14, [x0, #CTX_HDFGWTR_EL2]

	mrs	x15, HFGRTR_EL2
	mrs	x16, HFGWTR_EL2
	stp	x15, x16, [x0, #CTX_HFGRTR_EL2]

	mrs	x17, CNTPOFF_EL2
	str	x17, [x0, #CTX_CNTPOFF_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	mrs	x9, cnthps_ctl_el2
	mrs	x10, cnthps_cval_el2
	stp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]

	mrs	x11, cnthps_tval_el2
	mrs	x12, cnthvs_ctl_el2
	stp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]

	mrs	x13, cnthvs_cval_el2
	mrs	x14, cnthvs_tval_el2
	stp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]

	mrs	x15, cnthv_ctl_el2
	mrs	x16, cnthv_cval_el2
	stp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]

	mrs	x17, cnthv_tval_el2
	mrs	x9, contextidr_el2
	stp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]

	mrs	x10, sder32_el2
	str	x10, [x0, #CTX_SDER32_EL2]

	mrs	x11, ttbr1_el2
	str	x11, [x0, #CTX_TTBR1_EL2]

	mrs	x12, vdisr_el2
	str	x12, [x0, #CTX_VDISR_EL2]

	mrs	x13, vncr_el2
	str	x13, [x0, #CTX_VNCR_EL2]

	mrs	x14, vsesr_el2
	str	x14, [x0, #CTX_VSESR_EL2]

	mrs	x15, vstcr_el2
	str	x15, [x0, #CTX_VSTCR_EL2]

	mrs	x16, vsttbr_el2
	str	x16, [x0, #CTX_VSTTBR_EL2]

	mrs	x17, TRFCR_EL2
	str	x17, [x0, #CTX_TRFCR_EL2]
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	mrs	x9, scxtnum_el2
	str	x9, [x0, #CTX_SCXTNUM_EL2]
#endif

	ret
endfunc el2_sysregs_context_save

/* -----------------------------------------------------
 * The following function strictly follows the AArch64
 * PCS to use x9-x17 (temporary caller-saved registers)
 * to restore EL2 system register context. It assumes
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * from where the register context will be restored.
 *
 * The following registers are not restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
func el2_sysregs_context_restore

#if ERRATA_SPECULATIVE_AT
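	/*
	 * On cores affected by speculative AT errata, an AT instruction
	 * or a speculative walk performed while the translation registers
	 * are only partially restored can allocate TLB entries derived
	 * from an inconsistent translation regime. Page table walks are
	 * therefore disabled here, and SCTLR_EL2/TCR_EL2 are restored
	 * last, after an ISB (see the end of this function).
	 */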
	/* Clear the EPD0, EPD1 and M bits to disable PTW */
	mrs	x9, hcr_el2
	tst	x9, #HCR_E2H_BIT
	bne	1f
	mrs	x9, tcr_el2
	orr	x9, x9, #TCR_EPD0_BIT
	orr	x9, x9, #TCR_EPD1_BIT
	msr	tcr_el2, x9
1:	mrs	x9, sctlr_el2
	bic	x9, x9, #SCTLR_M_BIT
	msr	sctlr_el2, x9
	isb
#endif

	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cnthp_ctl_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHP_CVAL_EL2]
	msr	cnthp_cval_el2, x15
	msr	cnthp_tval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTVOFF_EL2]
	msr	cntvoff_el2, x17
	msr	cptr_el2, x9

	ldp	x10, x11, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x10
	msr	elr_el2, x11

	ldp	x14, x15, [x0, #CTX_ESR_EL2]
	msr	esr_el2, x14
	msr	far_el2, x15

	ldp	x16, x17, [x0, #CTX_HACR_EL2]
	msr	hacr_el2, x16
	msr	hcr_el2, x17

	ldp	x9, x10, [x0, #CTX_HPFAR_EL2]
	msr	hpfar_el2, x9
	msr	hstr_el2, x10

	ldp	x11, x12, [x0, #CTX_ICC_SRE_EL2]
	msr	ICC_SRE_EL2, x11
	msr	ICH_HCR_EL2, x12

	ldp	x13, x14, [x0, #CTX_ICH_VMCR_EL2]
	msr	ICH_VMCR_EL2, x13
	msr	mair_el2, x14

	ldp	x15, x16, [x0, #CTX_MDCR_EL2]
	msr	mdcr_el2, x15
	msr	PMSCR_EL2, x16

	ldp	x17, x9, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x17
	msr	sp_el2, x9

	ldp	x10, x11, [x0, #CTX_TPIDR_EL2]
	msr	tpidr_el2, x10
	msr	ttbr0_el2, x11

	ldp	x12, x13, [x0, #CTX_VBAR_EL2]
	msr	vbar_el2, x12
	msr	vmpidr_el2, x13

	ldp	x14, x15, [x0, #CTX_VPIDR_EL2]
	msr	vpidr_el2, x14
	msr	vtcr_el2, x15

	ldr	x16, [x0, #CTX_VTTBR_EL2]
	msr	vttbr_el2, x16

#if CTX_INCLUDE_MTE_REGS
	ldr	x17, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x17
#endif

#if ENABLE_MPAM_FOR_LOWER_ELS
	ldp	x9, x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x9
	msr	MPAMHCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MPAMVPM0_EL2]
	msr	MPAMVPM0_EL2, x11
	msr	MPAMVPM1_EL2, x12

	ldp	x13, x14, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x13
	msr	MPAMVPM3_EL2, x14

	ldp	x15, x16, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x15
	msr	MPAMVPM5_EL2, x16

	ldp	x17, x9, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x17
	msr	MPAMVPM7_EL2, x9

	ldr	x10, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x10
#endif

#if ARM_ARCH_AT_LEAST(8, 6)
	ldp	x11, x12, [x0, #CTX_HAFGRTR_EL2]
	msr	HAFGRTR_EL2, x11
	msr	HDFGRTR_EL2, x12

	ldp	x13, x14, [x0, #CTX_HDFGWTR_EL2]
	msr	HDFGWTR_EL2, x13
	msr	HFGITR_EL2, x14

	ldp	x15, x16, [x0, #CTX_HFGRTR_EL2]
	msr	HFGRTR_EL2, x15
	msr	HFGWTR_EL2, x16

	ldr	x17, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 4)
	ldp	x9, x10, [x0, #CTX_CNTHPS_CTL_EL2]
	msr	cnthps_ctl_el2, x9
	msr	cnthps_cval_el2, x10

	ldp	x11, x12, [x0, #CTX_CNTHPS_TVAL_EL2]
	msr	cnthps_tval_el2, x11
	msr	cnthvs_ctl_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHVS_CVAL_EL2]
	msr	cnthvs_cval_el2, x13
	msr	cnthvs_tval_el2, x14

	ldp	x15, x16, [x0, #CTX_CNTHV_CTL_EL2]
	msr	cnthv_ctl_el2, x15
	msr	cnthv_cval_el2, x16

	ldp	x17, x9, [x0, #CTX_CNTHV_TVAL_EL2]
	msr	cnthv_tval_el2, x17
	msr	contextidr_el2, x9

	ldr	x10, [x0, #CTX_SDER32_EL2]
	msr	sder32_el2, x10

	ldr	x11, [x0, #CTX_TTBR1_EL2]
	msr	ttbr1_el2, x11

	ldr	x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x12

	ldr	x13, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x13

	ldr	x14, [x0, #CTX_VSESR_EL2]
	msr	vsesr_el2, x14

	ldr	x15, [x0, #CTX_VSTCR_EL2]
	msr	vstcr_el2, x15

	ldr	x16, [x0, #CTX_VSTTBR_EL2]
	msr	vsttbr_el2, x16

	ldr	x17, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x17
#endif

#if ARM_ARCH_AT_LEAST(8, 5)
	ldr	x9, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x9
#endif

#if ERRATA_SPECULATIVE_AT
	/*
	 * Make sure all registers are restored successfully except
	 * SCTLR_EL2 and TCR_EL2. These two are deliberately written
	 * last, after the ISB, so that the MMU and page table walks
	 * are only re-enabled once the rest of the EL2 translation
	 * state is consistent again.
	 */
	isb
#endif

	ldr	x9, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x9
	ldr	x9, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9

	ret
endfunc el2_sysregs_context_restore

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
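/*
 * As with the EL2 variant, a sketch of typical usage from C (the
 * 'get_sysregs_ctx' accessor and 'ctx' pointer are assumptions,
 * shown only for illustration):
 *
 *	el1_sysregs_context_save(get_sysregs_ctx(ctx));
 */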
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

	mrs	x15, sctlr_el1
	mrs	x16, actlr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, tcr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_TCR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

#if ERRATA_SPECULATIVE_AT
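	/*
	 * Same speculative-AT hardening as in el2_sysregs_context_restore:
	 * disable page table walks while the EL1 state is only partially
	 * restored; SCTLR_EL1 and TCR_EL1 are written last, after the ISB
	 * near the end of this function.
	 */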
	mrs	x9, tcr_el1
	orr	x9, x9, #TCR_EPD0_BIT
	orr	x9, x9, #TCR_EPD1_BIT
	msr	tcr_el1, x9
	mrs	x9, sctlr_el1
	bic	x9, x9, #SCTLR_M_BIT
	msr	sctlr_el1, x9
	isb
#endif

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

	ldr	x16, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldr	x16, [x0, #CTX_TPIDR_EL1]
	msr	tpidr_el1, x16

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif
	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif
	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif

#if ERRATA_SPECULATIVE_AT
	/*
	 * Make sure all registers are restored successfully except
	 * SCTLR_EL1 and TCR_EL1. These two are deliberately written
	 * last, after the ISB, so that the MMU and page table walks
	 * are only re-enabled once the rest of the EL1 translation
	 * state is consistent again.
	 */
	isb
#endif

	ldr	x9, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x9
	ldr	x9, [x0, #CTX_TCR_EL1]
	msr	tcr_el1, x9

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
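/*
 * Each 'stp qN, qM' below stores a 32-byte pair of SIMD/FP registers,
 * so the CTX_FP_Q* offsets in context.h are assumed to be contiguous
 * at 16 bytes per Q register; the sixteen pairs cover all of q0-q31.
 */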
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif
	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/* ------------------------------------------------------------------
 * The following function is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter is disabled in MDCR_EL3
 * when ARMv8.5-PMU is implemented; if it is not, and the function is
 * called from Non-secure state, it saves PMCR_EL0 and disables the
 * cycle counter.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * These are not macros, to ensure their invocation fits within the
 * 32-instruction limit per exception vector.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
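/*
 * Note: SP is assumed to point at the base of the current CPU's
 * 'cpu_context' structure (whose gp_regs member sits at
 * CTX_GPREGS_OFFSET), so x0-x29 must still hold their values from
 * the point of exception entry when this function is called.
 */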
func save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be saved in the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x9, mdcr_el3
	tst	x9, #MDCR_SCCD_BIT
	bne	1f

	/* Secure Cycle Counter is not disabled */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */

	ret
endfunc save_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled), PMCR_EL0 and
 * all general purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* ----------------------------------------------------------
	 * Restore PMCR_EL0 when returning to Non-secure state if the
	 * Secure Cycle Counter is not disabled in MDCR_EL3 when
	 * ARMv8.5-PMU is implemented.
	 * ----------------------------------------------------------
	 */
	mrs	x0, scr_el3
	tst	x0, #SCR_NS_BIT
	beq	2f

	/* ----------------------------------------------------------
	 * Back to Non-secure state.
	 * Check if the earlier initialization of MDCR_EL3.SCCD to 1
	 * failed, meaning that ARMv8.5-PMU is not implemented and
	 * PMCR_EL0 should be restored from the Non-secure context.
	 * ----------------------------------------------------------
	 */
	mrs	x0, mdcr_el3
	tst	x0, #MDCR_SCCD_BIT
	bne	2f
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
2:
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
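	/*
	 * SP_EL0 is restored through x28 as scratch, after which x28
	 * and x29 are reloaded from the context; x30 is deliberately
	 * left for the caller to restore.
	 */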
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
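/*
 * In outline: save the EL3 runtime stack pointer and switch to the
 * SP_EL3 context frame, restore SCR_EL3/SPSR_EL3/ELR_EL3, restore
 * the CVE-2018-3639 mitigation state as it was on entry, restore
 * the GP/PMCR_EL0/PAuth registers and ERET to the lower exception
 * level.
 */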
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif
	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#if IMAGE_BL31 && RAS_EXTENSION
	/* ----------------------------------------------------------
	 * Issue Error Synchronization Barrier to synchronize SErrors
	 * before exiting EL3. We're running with EAs unmasked, so
	 * any synchronized errors would be taken immediately;
	 * therefore no need to inspect DISR_EL1 register.
	 * ----------------------------------------------------------
	 */
	esb
#endif
	exception_return

endfunc el3_exit