/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_EL2_REGS
	.global	el2_sysregs_context_save_common
	.global	el2_sysregs_context_restore_common
#if ENABLE_SPE_FOR_LOWER_ELS
	.global	el2_sysregs_context_save_spe
	.global	el2_sysregs_context_restore_spe
#endif /* ENABLE_SPE_FOR_LOWER_ELS */
#if CTX_INCLUDE_MTE_REGS
	.global	el2_sysregs_context_save_mte
	.global	el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */
#if ENABLE_MPAM_FOR_LOWER_ELS
	.global	el2_sysregs_context_save_mpam
	.global	el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */
#if ENABLE_FEAT_ECV
	.global	el2_sysregs_context_save_ecv
	.global	el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */
#if ENABLE_FEAT_VHE
	.global	el2_sysregs_context_save_vhe
	.global	el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */
#if RAS_EXTENSION
	.global	el2_sysregs_context_save_ras
	.global	el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */
#if CTX_INCLUDE_NEVE_REGS
	.global	el2_sysregs_context_save_nv2
	.global	el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */
#if ENABLE_TRF_FOR_NS
	.global	el2_sysregs_context_save_trf
	.global	el2_sysregs_context_restore_trf
#endif /* ENABLE_TRF_FOR_NS */
#if ENABLE_FEAT_CSV2_2
	.global	el2_sysregs_context_save_csv2
	.global	el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */
#endif /* CTX_INCLUDE_EL2_REGS */

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

#if CTX_INCLUDE_EL2_REGS

/* -----------------------------------------------------
 * The following functions strictly follow the AArch64
 * PCS to use x9-x16 (temporary caller-saved registers)
 * to save/restore EL2 system register context.
 * The el2_sysregs_context_save/restore_common functions
 * save and restore registers that are common to all
 * configurations. The rest of the functions save and
 * restore EL2 system registers that are present when a
 * particular feature is enabled. All functions assume
 * that 'x0' is pointing to an 'el2_sys_regs' structure
 * where the register context will be saved/restored.
 *
 * The following registers are not saved/restored:
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * ICH_LR<n>_EL2
 * -----------------------------------------------------
 */
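/* -----------------------------------------------------
 * Illustrative usage (an assumption for clarity, not a
 * statement about the callers in this tree): a caller
 * typically loads 'x0' with the address of the
 * el2_sysregs area inside the CPU context of the world
 * being switched, calls el2_sysregs_context_save_common
 * and then the save helper for each enabled feature;
 * the restore path mirrors this on the way back.
 * -----------------------------------------------------
 */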
func el2_sysregs_context_save_common
	mrs	x9, actlr_el2
	mrs	x10, afsr0_el2
	stp	x9, x10, [x0, #CTX_ACTLR_EL2]

	mrs	x11, afsr1_el2
	mrs	x12, amair_el2
	stp	x11, x12, [x0, #CTX_AFSR1_EL2]

	mrs	x13, cnthctl_el2
	mrs	x14, cntvoff_el2
	stp	x13, x14, [x0, #CTX_CNTHCTL_EL2]

	mrs	x15, cptr_el2
	str	x15, [x0, #CTX_CPTR_EL2]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x16, dbgvcr32_el2
	str	x16, [x0, #CTX_DBGVCR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	mrs	x9, elr_el2
	mrs	x10, esr_el2
	stp	x9, x10, [x0, #CTX_ELR_EL2]

	mrs	x11, far_el2
	mrs	x12, hacr_el2
	stp	x11, x12, [x0, #CTX_FAR_EL2]

	mrs	x13, hcr_el2
	mrs	x14, hpfar_el2
	stp	x13, x14, [x0, #CTX_HCR_EL2]

	mrs	x15, hstr_el2
	mrs	x16, ICC_SRE_EL2
	stp	x15, x16, [x0, #CTX_HSTR_EL2]

	mrs	x9, ICH_HCR_EL2
	mrs	x10, ICH_VMCR_EL2
	stp	x9, x10, [x0, #CTX_ICH_HCR_EL2]

	mrs	x11, mair_el2
	mrs	x12, mdcr_el2
	stp	x11, x12, [x0, #CTX_MAIR_EL2]

	mrs	x14, sctlr_el2
	str	x14, [x0, #CTX_SCTLR_EL2]

	mrs	x15, spsr_el2
	mrs	x16, sp_el2
	stp	x15, x16, [x0, #CTX_SPSR_EL2]

	mrs	x9, tcr_el2
	mrs	x10, tpidr_el2
	stp	x9, x10, [x0, #CTX_TCR_EL2]

	mrs	x11, ttbr0_el2
	mrs	x12, vbar_el2
	stp	x11, x12, [x0, #CTX_TTBR0_EL2]

	mrs	x13, vmpidr_el2
	mrs	x14, vpidr_el2
	stp	x13, x14, [x0, #CTX_VMPIDR_EL2]

	mrs	x15, vtcr_el2
	mrs	x16, vttbr_el2
	stp	x15, x16, [x0, #CTX_VTCR_EL2]
	ret
endfunc el2_sysregs_context_save_common

func el2_sysregs_context_restore_common
	ldp	x9, x10, [x0, #CTX_ACTLR_EL2]
	msr	actlr_el2, x9
	msr	afsr0_el2, x10

	ldp	x11, x12, [x0, #CTX_AFSR1_EL2]
	msr	afsr1_el2, x11
	msr	amair_el2, x12

	ldp	x13, x14, [x0, #CTX_CNTHCTL_EL2]
	msr	cnthctl_el2, x13
	msr	cntvoff_el2, x14

	ldr	x15, [x0, #CTX_CPTR_EL2]
	msr	cptr_el2, x15

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x16, [x0, #CTX_DBGVCR32_EL2]
	msr	dbgvcr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	ldp	x9, x10, [x0, #CTX_ELR_EL2]
	msr	elr_el2, x9
	msr	esr_el2, x10

	ldp	x11, x12, [x0, #CTX_FAR_EL2]
	msr	far_el2, x11
	msr	hacr_el2, x12

	ldp	x13, x14, [x0, #CTX_HCR_EL2]
	msr	hcr_el2, x13
	msr	hpfar_el2, x14

	ldp	x15, x16, [x0, #CTX_HSTR_EL2]
	msr	hstr_el2, x15
	msr	ICC_SRE_EL2, x16

	ldp	x9, x10, [x0, #CTX_ICH_HCR_EL2]
	msr	ICH_HCR_EL2, x9
	msr	ICH_VMCR_EL2, x10

	ldp	x11, x12, [x0, #CTX_MAIR_EL2]
	msr	mair_el2, x11
	msr	mdcr_el2, x12

	ldr	x14, [x0, #CTX_SCTLR_EL2]
	msr	sctlr_el2, x14

	ldp	x15, x16, [x0, #CTX_SPSR_EL2]
	msr	spsr_el2, x15
	msr	sp_el2, x16

	ldp	x9, x10, [x0, #CTX_TCR_EL2]
	msr	tcr_el2, x9
	msr	tpidr_el2, x10

	ldp	x11, x12, [x0, #CTX_TTBR0_EL2]
	msr	ttbr0_el2, x11
	msr	vbar_el2, x12

	ldp	x13, x14, [x0, #CTX_VMPIDR_EL2]
	msr	vmpidr_el2, x13
	msr	vpidr_el2, x14

	ldp	x15, x16, [x0, #CTX_VTCR_EL2]
	msr	vtcr_el2, x15
	msr	vttbr_el2, x16
	ret
endfunc el2_sysregs_context_restore_common

#if ENABLE_SPE_FOR_LOWER_ELS
func el2_sysregs_context_save_spe
	mrs	x13, PMSCR_EL2
	str	x13, [x0, #CTX_PMSCR_EL2]
	ret
endfunc el2_sysregs_context_save_spe

func el2_sysregs_context_restore_spe
	ldr	x13, [x0, #CTX_PMSCR_EL2]
	msr	PMSCR_EL2, x13
	ret
endfunc el2_sysregs_context_restore_spe
#endif /* ENABLE_SPE_FOR_LOWER_ELS */

#if CTX_INCLUDE_MTE_REGS
func el2_sysregs_context_save_mte
	mrs	x9, TFSR_EL2
	str	x9, [x0, #CTX_TFSR_EL2]
	ret
endfunc el2_sysregs_context_save_mte

func el2_sysregs_context_restore_mte
	ldr	x9, [x0, #CTX_TFSR_EL2]
	msr	TFSR_EL2, x9
	ret
endfunc el2_sysregs_context_restore_mte
#endif /* CTX_INCLUDE_MTE_REGS */

#if ENABLE_MPAM_FOR_LOWER_ELS
func el2_sysregs_context_save_mpam
	mrs	x10, MPAM2_EL2
	str	x10, [x0, #CTX_MPAM2_EL2]

	mrs	x10, MPAMIDR_EL1

	/*
	 * The context registers that we intend to save would be part of the
	 * PE's system register frame only if MPAMIDR_EL1.HAS_HCR == 1.
	 */
	tbz	w10, #MPAMIDR_EL1_HAS_HCR_SHIFT, 3f

	/*
	 * MPAMHCR_EL2, MPAMVPMV_EL2 and MPAMVPM0_EL2 would be present in the
	 * system register frame if MPAMIDR_EL1.HAS_HCR == 1. Proceed to save
	 * the context of these registers.
	 */
	mrs	x11, MPAMHCR_EL2
	mrs	x12, MPAMVPM0_EL2
	stp	x11, x12, [x0, #CTX_MPAMHCR_EL2]

	mrs	x13, MPAMVPMV_EL2
	str	x13, [x0, #CTX_MPAMVPMV_EL2]

	/*
	 * MPAMIDR_EL1.VPMR_MAX has to be probed to obtain the maximum supported
	 * VPMR value. Proceed to save the context of registers from
	 * MPAMVPM1_EL2 to MPAMVPM<x>_EL2 where x is VPMR_MAX. From MPAM spec,
	 * VPMR_MAX should not be zero if HAS_HCR == 1.
	 */
	ubfx	x10, x10, #MPAMIDR_EL1_VPMR_MAX_SHIFT, \
		#MPAMIDR_EL1_VPMR_MAX_WIDTH

	/*
	 * Once VPMR_MAX has been identified, calculate the offset relative to
	 * PC to jump to so that relevant context can be saved. The offset is
	 * calculated as (VPMR_POSSIBLE_MAX - VPMR_MAX) * (instruction size for
	 * saving one VPM register) + (absolute address of label "1").
	 */
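	/*
	 * Worked example (illustrative values only): with
	 * MPAMIDR_EL1_VPMR_MAX_POSSIBLE being 7 and a probed VPMR_MAX of 4,
	 * the target is "1f + (7 - 4) * <block size>", so the MPAMVPM7_EL2,
	 * MPAMVPM6_EL2 and MPAMVPM5_EL2 blocks are skipped and the save
	 * starts at the MPAMVPM4_EL2 block. The restore path below uses the
	 * same calculation.
	 */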
	mov	w11, #MPAMIDR_EL1_VPMR_MAX_POSSIBLE
	sub	w10, w11, w10

	/* Calculate the size of one block of MPAMVPM*_EL2 save */
	adr	x11, 1f
	adr	x12, 2f
	sub	x12, x12, x11

	madd	x10, x10, x12, x11
	br	x10

	/*
	 * The branch above would land properly on one of the blocks following
	 * label "1". Make sure that the order of save is retained.
	 */
1:
#if ENABLE_BTI
	bti	j
#endif
	mrs	x10, MPAMVPM7_EL2
	str	x10, [x0, #CTX_MPAMVPM7_EL2]
2:
#if ENABLE_BTI
	bti	j
#endif
	mrs	x11, MPAMVPM6_EL2
	str	x11, [x0, #CTX_MPAMVPM6_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x12, MPAMVPM5_EL2
	str	x12, [x0, #CTX_MPAMVPM5_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x13, MPAMVPM4_EL2
	str	x13, [x0, #CTX_MPAMVPM4_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x14, MPAMVPM3_EL2
	str	x14, [x0, #CTX_MPAMVPM3_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x15, MPAMVPM2_EL2
	str	x15, [x0, #CTX_MPAMVPM2_EL2]

#if ENABLE_BTI
	bti	j
#endif
	mrs	x16, MPAMVPM1_EL2
	str	x16, [x0, #CTX_MPAMVPM1_EL2]

3:	ret
endfunc el2_sysregs_context_save_mpam

func el2_sysregs_context_restore_mpam
	ldr	x10, [x0, #CTX_MPAM2_EL2]
	msr	MPAM2_EL2, x10

	mrs	x10, MPAMIDR_EL1
	/*
	 * The context registers that we intend to restore would be part of the
	 * PE's system register frame only if MPAMIDR_EL1.HAS_HCR == 1.
	 */
	tbz	w10, #MPAMIDR_EL1_HAS_HCR_SHIFT, 3f

	/*
	 * MPAMHCR_EL2, MPAMVPMV_EL2 and MPAMVPM0_EL2 would be present in the
	 * system register frame if MPAMIDR_EL1.HAS_HCR == 1. Proceed to restore
	 * the context of these registers.
	 */
	ldp	x11, x12, [x0, #CTX_MPAMHCR_EL2]
	msr	MPAMHCR_EL2, x11
	msr	MPAMVPM0_EL2, x12

	ldr	x13, [x0, #CTX_MPAMVPMV_EL2]
	msr	MPAMVPMV_EL2, x13

	/*
	 * MPAMIDR_EL1.VPMR_MAX has to be probed to obtain the maximum supported
	 * VPMR value. Proceed to restore the context of registers from
	 * MPAMVPM1_EL2 to MPAMVPM<x>_EL2 where x is VPMR_MAX. From MPAM spec,
	 * VPMR_MAX should not be zero if HAS_HCR == 1.
	 */
	ubfx	x10, x10, #MPAMIDR_EL1_VPMR_MAX_SHIFT, \
		#MPAMIDR_EL1_VPMR_MAX_WIDTH

	/*
	 * Once VPMR_MAX has been identified, calculate the offset relative to
	 * PC to jump to so that relevant context can be restored. The offset is
	 * calculated as (VPMR_POSSIBLE_MAX - VPMR_MAX) * (instruction size for
	 * restoring one VPM register) + (absolute address of label "1").
	 */
	mov	w11, #MPAMIDR_EL1_VPMR_MAX_POSSIBLE
	sub	w10, w11, w10

	/* Calculate the size of one block of MPAMVPM*_EL2 restore */
	adr	x11, 1f
	adr	x12, 2f
	sub	x12, x12, x11

	madd	x10, x10, x12, x11
	br	x10

	/*
	 * The branch above would land properly on one of the blocks following
	 * label "1". Make sure that the order of restore is retained.
	 */
1:

#if ENABLE_BTI
	bti	j
#endif
	ldr	x10, [x0, #CTX_MPAMVPM7_EL2]
	msr	MPAMVPM7_EL2, x10
2:
#if ENABLE_BTI
	bti	j
#endif
	ldr	x11, [x0, #CTX_MPAMVPM6_EL2]
	msr	MPAMVPM6_EL2, x11

#if ENABLE_BTI
	bti	j
#endif
	ldr	x12, [x0, #CTX_MPAMVPM5_EL2]
	msr	MPAMVPM5_EL2, x12

#if ENABLE_BTI
	bti	j
#endif
	ldr	x13, [x0, #CTX_MPAMVPM4_EL2]
	msr	MPAMVPM4_EL2, x13

#if ENABLE_BTI
	bti	j
#endif
	ldr	x14, [x0, #CTX_MPAMVPM3_EL2]
	msr	MPAMVPM3_EL2, x14

#if ENABLE_BTI
	bti	j
#endif
	ldr	x15, [x0, #CTX_MPAMVPM2_EL2]
	msr	MPAMVPM2_EL2, x15

#if ENABLE_BTI
	bti	j
#endif
	ldr	x16, [x0, #CTX_MPAMVPM1_EL2]
	msr	MPAMVPM1_EL2, x16

3:	ret
endfunc el2_sysregs_context_restore_mpam
#endif /* ENABLE_MPAM_FOR_LOWER_ELS */

#if ENABLE_FEAT_ECV
func el2_sysregs_context_save_ecv
	mrs	x11, CNTPOFF_EL2
	str	x11, [x0, #CTX_CNTPOFF_EL2]
	ret
endfunc el2_sysregs_context_save_ecv

func el2_sysregs_context_restore_ecv
	ldr	x11, [x0, #CTX_CNTPOFF_EL2]
	msr	CNTPOFF_EL2, x11
	ret
endfunc el2_sysregs_context_restore_ecv
#endif /* ENABLE_FEAT_ECV */

#if ENABLE_FEAT_VHE
func el2_sysregs_context_save_vhe
	/*
	 * CONTEXTIDR_EL2 register is saved only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	mrs	x9, contextidr_el2
	mrs	x10, ttbr1_el2
	stp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	ret
endfunc el2_sysregs_context_save_vhe

func el2_sysregs_context_restore_vhe
	/*
	 * CONTEXTIDR_EL2 register is restored only when FEAT_VHE or
	 * FEAT_Debugv8p2 (currently not in TF-A) is supported.
	 */
	ldp	x9, x10, [x0, #CTX_CONTEXTIDR_EL2]
	msr	contextidr_el2, x9
	msr	ttbr1_el2, x10
	ret
endfunc el2_sysregs_context_restore_vhe
#endif /* ENABLE_FEAT_VHE */

#if RAS_EXTENSION
func el2_sysregs_context_save_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are saved only when
	 * FEAT_RAS is supported.
	 */
	mrs	x11, vdisr_el2
	mrs	x12, vsesr_el2
	stp	x11, x12, [x0, #CTX_VDISR_EL2]
	ret
endfunc el2_sysregs_context_save_ras

func el2_sysregs_context_restore_ras
	/*
	 * VDISR_EL2 and VSESR_EL2 registers are restored only when FEAT_RAS
	 * is supported.
	 */
	ldp	x11, x12, [x0, #CTX_VDISR_EL2]
	msr	vdisr_el2, x11
	msr	vsesr_el2, x12
	ret
endfunc el2_sysregs_context_restore_ras
#endif /* RAS_EXTENSION */

#if CTX_INCLUDE_NEVE_REGS
func el2_sysregs_context_save_nv2
	/*
	 * VNCR_EL2 register is saved only when FEAT_NV2 is supported.
	 */
	mrs	x16, vncr_el2
	str	x16, [x0, #CTX_VNCR_EL2]
	ret
endfunc el2_sysregs_context_save_nv2

func el2_sysregs_context_restore_nv2
	/*
	 * VNCR_EL2 register is restored only when FEAT_NV2 is supported.
	 */
	ldr	x16, [x0, #CTX_VNCR_EL2]
	msr	vncr_el2, x16
	ret
endfunc el2_sysregs_context_restore_nv2
#endif /* CTX_INCLUDE_NEVE_REGS */

#if ENABLE_TRF_FOR_NS
func el2_sysregs_context_save_trf
	/*
	 * TRFCR_EL2 register is saved only when FEAT_TRF is supported.
	 */
	mrs	x12, TRFCR_EL2
	str	x12, [x0, #CTX_TRFCR_EL2]
	ret
endfunc el2_sysregs_context_save_trf

func el2_sysregs_context_restore_trf
	/*
	 * TRFCR_EL2 register is restored only when FEAT_TRF is supported.
	 */
	ldr	x12, [x0, #CTX_TRFCR_EL2]
	msr	TRFCR_EL2, x12
	ret
endfunc el2_sysregs_context_restore_trf
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
func el2_sysregs_context_save_csv2
	/*
	 * SCXTNUM_EL2 register is saved only when FEAT_CSV2_2 is supported.
	 */
	mrs	x13, scxtnum_el2
	str	x13, [x0, #CTX_SCXTNUM_EL2]
	ret
endfunc el2_sysregs_context_save_csv2

func el2_sysregs_context_restore_csv2
	/*
	 * SCXTNUM_EL2 register is restored only when FEAT_CSV2_2 is supported.
	 */
	ldr	x13, [x0, #CTX_SCXTNUM_EL2]
	msr	scxtnum_el2, x13
	ret
endfunc el2_sysregs_context_restore_csv2
#endif /* ENABLE_FEAT_CSV2_2 */

#endif /* CTX_INCLUDE_EL2_REGS */

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to save EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]
#endif /* CTX_INCLUDE_MTE_REGS */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS to use
 * x9-x17 (temporary caller-saved registers) to restore EL1 system
 * register context. It assumes that 'x0' is pointing to an
 * 'el1_sys_regs' structure from where the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build has instructed so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build has instructed so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build has instructed so */
#if CTX_INCLUDE_MTE_REGS
	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14
#endif /* CTX_INCLUDE_MTE_REGS */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use
 * x9-x17 (temporary caller-saved registers according to AArch64 PCS)
 * to save floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure where the register context will
 * be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function follows the aapcs_64 strictly to use x9-x17
 * (temporary caller-saved registers according to AArch64 PCS) to
 * restore floating point register context. It assumes that 'x0' is
 * pointing to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However currently we don't use VFP registers nor set traps in
 * Trusted Firmware, and assume it's cleared.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as ERET to
	 * switch to secure EL1 or non-secure world
	 * covers it
	 */

| 874 | ret |
Kévin Petit | a877c25 | 2015-03-24 14:03:57 +0000 | [diff] [blame] | 875 | endfunc fpregs_context_restore |
Juan Castillo | 258e94f | 2014-06-25 17:26:36 +0100 | [diff] [blame] | 876 | #endif /* CTX_INCLUDE_FPREGS */ |
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 877 | |
Daniel Boulby | 928747f | 2021-05-25 18:09:34 +0100 | [diff] [blame] | 878 | /* |
Manish Pandey | 62d532a | 2022-11-17 15:47:05 +0000 | [diff] [blame] | 879 | * Set SCR_EL3.EA bit to enable SErrors at EL3 |
| 880 | */ |
| 881 | .macro enable_serror_at_el3 |
| 882 | mrs x8, scr_el3 |
| 883 | orr x8, x8, #SCR_EA_BIT |
| 884 | msr scr_el3, x8 |
| 885 | .endm |
| 886 | |
| 887 | /* |
Daniel Boulby | 928747f | 2021-05-25 18:09:34 +0100 | [diff] [blame] | 888 | * Set the PSTATE bits not set when the exception was taken as |
| 889 | * described in the AArch64.TakeException() pseudocode function |
| 890 | * in ARM DDI 0487F.c page J1-7635 to a default value. |
| 891 | */ |
| 892 | .macro set_unset_pstate_bits |
Jayanth Dodderi Chidanand | 72b69b8 | 2022-01-26 17:14:43 +0000 | [diff] [blame] | 893 | /* |
| 894 | * If Data Independent Timing (DIT) functionality is implemented, |
| 895 | * always enable DIT in EL3 |
| 896 | */ |
Daniel Boulby | 928747f | 2021-05-25 18:09:34 +0100 | [diff] [blame] | 897 | #if ENABLE_FEAT_DIT |
Jayanth Dodderi Chidanand | 72b69b8 | 2022-01-26 17:14:43 +0000 | [diff] [blame] | 898 | mov x8, #DIT_BIT |
| 899 | msr DIT, x8 |
Daniel Boulby | 928747f | 2021-05-25 18:09:34 +0100 | [diff] [blame] | 900 | #endif /* ENABLE_FEAT_DIT */ |
| 901 | .endm /* set_unset_pstate_bits */ |
| 902 | |
/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also checks if the Secure Cycle Counter (PMCCNTR_EL0)
 * is disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is more
 * complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* ----------------------------------------------------------
	 * Check if the earlier initialization of MDCR_EL3.SCCD/MCCD
	 * to 1 has failed.
	 *
	 * MDCR_EL3:
	 * MCCD bit set: Prohibits the Cycle Counter PMCCNTR_EL0 from
	 * counting at EL3.
	 * SCCD bit set: Secure Cycle Counter Disable. Prohibits
	 * PMCCNTR_EL0 from counting in Secure state.
	 * If these bits are not set, FEAT_PMUv3p5/7 is not
	 * implemented and PMCR_EL0 has to be saved in the non-secure
	 * context.
	 * ----------------------------------------------------------
	 */
	mov_imm	x10, (MDCR_SCCD_BIT | MDCR_MCCD_BIT)
	mrs	x9, mdcr_el3
	tst	x9, x10
	bne	1f

	/* ----------------------------------------------------------
	 * If control reaches here, the Secure Cycle Counter
	 * (PMCCNTR_EL0) is not prohibited from counting at EL3 and
	 * in Secure state.
	 * Hence, PMCR_EL0 has to be saved before the world switch.
	 * ----------------------------------------------------------
	 */
	mrs	x9, pmcr_el0

	/* Check caller's security state */
	mrs	x10, scr_el3
	tst	x10, #SCR_NS_BIT
	beq	2f

	/* Save PMCR_EL0 if called from Non-secure state */
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]

	/* Disable cycle counter when event counting is prohibited */
2:	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
1:
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * Save all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * Then set any of the PSTATE bits that are not set by hardware
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
| 1015 | func prepare_el3_entry |
| 1016 | save_gp_pmcr_pauth_regs |
Manish Pandey | 62d532a | 2022-11-17 15:47:05 +0000 | [diff] [blame] | 1017 | enable_serror_at_el3 |
Daniel Boulby | 928747f | 2021-05-25 18:09:34 +0100 | [diff] [blame] | 1018 | /* |
 | 1019 | 	 * Set the PSTATE bits not described in the AArch64.TakeException
| 1020 | * pseudocode to their default values. |
| 1021 | */ |
| 1022 | set_unset_pstate_bits |
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 1023 | ret |
Daniel Boulby | 95fb1aa | 2022-01-19 11:20:05 +0000 | [diff] [blame] | 1024 | endfunc prepare_el3_entry |
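 | | 
 | | 	/*
 | | 	 * Illustrative sketch only (an assumption, not taken from this
 | | 	 * file): an EL3 exception handler is expected to pair this
 | | 	 * function with el3_exit below, roughly as follows; the handler
 | | 	 * name is hypothetical.
 | | 	 *
 | | 	 *	bl	prepare_el3_entry
 | | 	 *	bl	handle_lower_el_exception	// hypothetical handler
 | | 	 *	b	el3_exit
 | | 	 */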
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 1025 | |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1026 | /* ------------------------------------------------------------------ |
 | 1027 |  * This function restores ARMv8.3-PAuth (if enabled), PMCR_EL0 and all
 | 1028 |  * general purpose registers except x30 from the CPU context.
 | 1029 |  * The x30 register must be explicitly restored by the caller.
| 1030 | * ------------------------------------------------------------------ |
Jeenu Viswambharan | 23d05a8 | 2017-11-29 16:59:34 +0000 | [diff] [blame] | 1031 | */ |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1032 | func restore_gp_pmcr_pauth_regs |
| 1033 | #if CTX_INCLUDE_PAUTH_REGS |
| 1034 | /* Restore the ARMv8.3 PAuth keys */ |
| 1035 | add x10, sp, #CTX_PAUTH_REGS_OFFSET |
| 1036 | |
| 1037 | ldp x0, x1, [x10, #CTX_PACIAKEY_LO] /* x1:x0 = APIAKey */ |
| 1038 | ldp x2, x3, [x10, #CTX_PACIBKEY_LO] /* x3:x2 = APIBKey */ |
| 1039 | ldp x4, x5, [x10, #CTX_PACDAKEY_LO] /* x5:x4 = APDAKey */ |
| 1040 | ldp x6, x7, [x10, #CTX_PACDBKEY_LO] /* x7:x6 = APDBKey */ |
| 1041 | ldp x8, x9, [x10, #CTX_PACGAKEY_LO] /* x9:x8 = APGAKey */ |
| 1042 | |
| 1043 | msr APIAKeyLo_EL1, x0 |
| 1044 | msr APIAKeyHi_EL1, x1 |
| 1045 | msr APIBKeyLo_EL1, x2 |
| 1046 | msr APIBKeyHi_EL1, x3 |
| 1047 | msr APDAKeyLo_EL1, x4 |
| 1048 | msr APDAKeyHi_EL1, x5 |
| 1049 | msr APDBKeyLo_EL1, x6 |
| 1050 | msr APDBKeyHi_EL1, x7 |
| 1051 | msr APGAKeyLo_EL1, x8 |
| 1052 | msr APGAKeyHi_EL1, x9 |
| 1053 | #endif /* CTX_INCLUDE_PAUTH_REGS */ |
| 1054 | |
| 1055 | /* ---------------------------------------------------------- |
 | 1056 | 	 * Restore PMCR_EL0 when returning to Non-secure state, if the
 | 1057 | 	 * Secure Cycle Counter was not disabled in MDCR_EL3 (i.e. if
 | 1058 | 	 * ARMv8.5-PMU is not implemented).
| 1059 | * ---------------------------------------------------------- |
| 1060 | */ |
| 1061 | mrs x0, scr_el3 |
| 1062 | tst x0, #SCR_NS_BIT |
| 1063 | beq 2f |
| 1064 | |
| 1065 | /* ---------------------------------------------------------- |
| 1066 | * Back to Non-secure state. |
Alexei Fedorov | 307f34b | 2021-05-14 11:21:56 +0100 | [diff] [blame] | 1067 | 	 * Check if the earlier attempt to set MDCR_EL3.SCCD/MCCD to 1
 | 1068 | 	 * failed, meaning that FEAT_PMUv3p5/7 is not implemented and
 | 1069 | 	 * PMCR_EL0 should be restored from the Non-secure context.
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1070 | * ---------------------------------------------------------- |
| 1071 | */ |
Alexei Fedorov | 307f34b | 2021-05-14 11:21:56 +0100 | [diff] [blame] | 1072 | mov_imm x1, (MDCR_SCCD_BIT | MDCR_MCCD_BIT) |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1073 | mrs x0, mdcr_el3 |
Alexei Fedorov | 307f34b | 2021-05-14 11:21:56 +0100 | [diff] [blame] | 1074 | tst x0, x1 |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1075 | bne 2f |
| 1076 | ldr x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0] |
| 1077 | msr pmcr_el0, x0 |
| 1078 | 2: |
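 | | 	/* ----------------------------------------------------------
 | | 	 * Restore the general purpose registers and SP_EL0 from the
 | | 	 * context structure on the EL3 stack.
 | | 	 * ----------------------------------------------------------
 | | 	 */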
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 1079 | ldp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0] |
| 1080 | ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2] |
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 1081 | ldp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4] |
| 1082 | ldp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6] |
| 1083 | ldp x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8] |
| 1084 | ldp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10] |
| 1085 | ldp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12] |
| 1086 | ldp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14] |
Jeenu Viswambharan | 23d05a8 | 2017-11-29 16:59:34 +0000 | [diff] [blame] | 1087 | ldp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16] |
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 1088 | ldp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18] |
| 1089 | ldp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20] |
| 1090 | ldp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22] |
| 1091 | ldp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24] |
| 1092 | ldp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26] |
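 | | 	/* Restore SP_EL0 via x28, before x28/x29 themselves are restored */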
Jeenu Viswambharan | 23d05a8 | 2017-11-29 16:59:34 +0000 | [diff] [blame] | 1093 | ldr x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0] |
| 1094 | msr sp_el0, x28 |
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 1095 | ldp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28] |
Jeenu Viswambharan | 23d05a8 | 2017-11-29 16:59:34 +0000 | [diff] [blame] | 1096 | ret |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1097 | endfunc restore_gp_pmcr_pauth_regs |
Jeenu Viswambharan | 23d05a8 | 2017-11-29 16:59:34 +0000 | [diff] [blame] | 1098 | |
Manish V Badarkhe | e07e808 | 2020-07-23 12:43:25 +0100 | [diff] [blame] | 1099 | /* |
 | 1100 |  * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 | 1101 |  * registers and then update them to disable stage 1 and stage 2
 | 1102 |  * page table walks for the lower ELs.
| 1103 | */ |
| 1104 | func save_and_update_ptw_el1_sys_regs |
| 1105 | /* ---------------------------------------------------------- |
| 1106 | * Save only sctlr_el1 and tcr_el1 registers |
| 1107 | * ---------------------------------------------------------- |
| 1108 | */ |
| 1109 | mrs x29, sctlr_el1 |
| 1110 | str x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)] |
| 1111 | mrs x29, tcr_el1 |
| 1112 | str x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)] |
| 1113 | |
| 1114 | /* ------------------------------------------------------------ |
 | 1115 | 	 * The following order must be followed to disable page table
 | 1116 | 	 * walks for the lower ELs (EL1 and EL0). The first step
 | 1117 | 	 * disables stage 1 page table walks and the second step
 | 1118 | 	 * forces the page table walker to use the TCR_EL1.EPDx bits
 | 1119 | 	 * when performing address translation. The ISB ensures that
 | 1120 | 	 * the CPU performs these two steps in order.
 | 1121 | 	 *
 | 1122 | 	 * 1. Set the TCR_EL1.EPDx bits to disable stage 1 page
 | 1123 | 	 *    table walks.
 | 1124 | 	 * 2. Set the SCTLR_EL1.M bit to avoid identity mapping via
 | 1125 | 	 *    stage 2 and force TCR_EL1.EPDx to be used by the page
 | 1126 | 	 *    table walker.
| 1127 | * ------------------------------------------------------------ |
| 1128 | */ |
| 1129 | orr x29, x29, #(TCR_EPD0_BIT) |
| 1130 | orr x29, x29, #(TCR_EPD1_BIT) |
| 1131 | msr tcr_el1, x29 |
| 1132 | isb |
| 1133 | mrs x29, sctlr_el1 |
| 1134 | orr x29, x29, #SCTLR_M_BIT |
| 1135 | msr sctlr_el1, x29 |
| 1136 | isb |
| 1137 | |
| 1138 | ret |
| 1139 | endfunc save_and_update_ptw_el1_sys_regs |
| 1140 | |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1141 | /* ------------------------------------------------------------------ |
 | 1142 |  * This routine assumes that SP_EL3 is pointing to a valid
 | 1143 |  * context structure from which the general purpose registers and
 | 1144 |  * other special registers can be retrieved.
| 1145 | * ------------------------------------------------------------------ |
Antonio Nino Diaz | 13adfb1 | 2019-01-30 20:41:31 +0000 | [diff] [blame] | 1146 | */ |
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 1147 | func el3_exit |
Jan Dabros | fa01598 | 2019-12-02 13:30:03 +0100 | [diff] [blame] | 1148 | #if ENABLE_ASSERTIONS |
| 1149 | /* el3_exit assumes SP_EL0 on entry */ |
| 1150 | mrs x17, spsel |
| 1151 | cmp x17, #MODE_SP_EL0 |
| 1152 | ASM_ASSERT(eq) |
Jayanth Dodderi Chidanand | 72b69b8 | 2022-01-26 17:14:43 +0000 | [diff] [blame] | 1153 | #endif /* ENABLE_ASSERTIONS */ |
Jan Dabros | fa01598 | 2019-12-02 13:30:03 +0100 | [diff] [blame] | 1154 | |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1155 | /* ---------------------------------------------------------- |
 | 1156 | 	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
 | 1157 | 	 * will be used for handling the next SMC.
| 1158 | * Then switch to SP_EL3. |
| 1159 | * ---------------------------------------------------------- |
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 1160 | */ |
| 1161 | mov x17, sp |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1162 | msr spsel, #MODE_SP_ELX |
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 1163 | str x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP] |
| 1164 | |
Max Shvetsov | c450277 | 2021-03-22 11:59:37 +0000 | [diff] [blame] | 1165 | #if IMAGE_BL31 |
| 1166 | /* ---------------------------------------------------------- |
Arunachalam Ganapathy | cac7d16 | 2021-07-08 09:35:57 +0100 | [diff] [blame] | 1167 | 	 * Restore CPTR_EL3.
Max Shvetsov | c450277 | 2021-03-22 11:59:37 +0000 | [diff] [blame] | 1168 | 	 * ZCR_EL3 is only restored if SVE is supported and enabled.
 | 1169 | 	 * Synchronization is required before zcr_el3 is accessed.
| 1170 | * ---------------------------------------------------------- |
| 1171 | */ |
Max Shvetsov | c450277 | 2021-03-22 11:59:37 +0000 | [diff] [blame] | 1172 | ldp x19, x20, [sp, #CTX_EL3STATE_OFFSET + CTX_CPTR_EL3] |
| 1173 | msr cptr_el3, x19 |
| 1174 | |
| 1175 | ands x19, x19, #CPTR_EZ_BIT |
| 1176 | beq sve_not_enabled |
| 1177 | |
| 1178 | isb |
| 1179 | msr S3_6_C1_C2_0, x20 /* zcr_el3 */ |
| 1180 | sve_not_enabled: |
Jayanth Dodderi Chidanand | 72b69b8 | 2022-01-26 17:14:43 +0000 | [diff] [blame] | 1181 | #endif /* IMAGE_BL31 */ |
Max Shvetsov | c450277 | 2021-03-22 11:59:37 +0000 | [diff] [blame] | 1182 | |
Dimitris Papastamos | ba51d9e | 2018-05-16 11:36:14 +0100 | [diff] [blame] | 1183 | #if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1184 | /* ---------------------------------------------------------- |
| 1185 | * Restore mitigation state as it was on entry to EL3 |
| 1186 | * ---------------------------------------------------------- |
| 1187 | */ |
Dimitris Papastamos | ba51d9e | 2018-05-16 11:36:14 +0100 | [diff] [blame] | 1188 | ldr x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE] |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1189 | cbz x17, 1f |
Dimitris Papastamos | ba51d9e | 2018-05-16 11:36:14 +0100 | [diff] [blame] | 1190 | blr x17 |
Antonio Nino Diaz | 13adfb1 | 2019-01-30 20:41:31 +0000 | [diff] [blame] | 1191 | 1: |
Jayanth Dodderi Chidanand | 72b69b8 | 2022-01-26 17:14:43 +0000 | [diff] [blame] | 1192 | #endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */ |
| 1193 | |
Alexei Fedorov | f41355c | 2019-09-13 14:11:59 +0100 | [diff] [blame] | 1194 | #if IMAGE_BL31 && RAS_EXTENSION |
| 1195 | /* ---------------------------------------------------------- |
| 1196 | * Issue Error Synchronization Barrier to synchronize SErrors |
| 1197 | * before exiting EL3. We're running with EAs unmasked, so |
| 1198 | * any synchronized errors would be taken immediately; |
| 1199 | * therefore no need to inspect DISR_EL1 register. |
| 1200 | * ---------------------------------------------------------- |
| 1201 | */ |
| 1202 | esb |
Madhukar Pappireddy | fba2572 | 2020-07-24 03:27:12 -0500 | [diff] [blame] | 1203 | #else |
| 1204 | dsb sy |
Jayanth Dodderi Chidanand | 72b69b8 | 2022-01-26 17:14:43 +0000 | [diff] [blame] | 1205 | #endif /* IMAGE_BL31 && RAS_EXTENSION */ |
| 1206 | |
Manish Pandey | 53bc59a | 2022-11-17 14:43:15 +0000 | [diff] [blame] | 1207 | /* ---------------------------------------------------------- |
| 1208 | * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET |
| 1209 | * ---------------------------------------------------------- |
| 1210 | */ |
| 1211 | ldr x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3] |
| 1212 | ldp x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3] |
| 1213 | msr scr_el3, x18 |
| 1214 | msr spsr_el3, x16 |
| 1215 | msr elr_el3, x17 |
| 1216 | |
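 | | 	/* ----------------------------------------------------------
 | | 	 * Undo the ERRATA_SPECULATIVE_AT changes: restore the
 | | 	 * SCTLR_EL1 and TCR_EL1 values saved earlier by
 | | 	 * save_and_update_ptw_el1_sys_regs.
 | | 	 * ----------------------------------------------------------
 | | 	 */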
| 1217 | restore_ptw_el1_sys_regs |
| 1218 | |
| 1219 | /* ---------------------------------------------------------- |
 | 1220 | 	 * Restore the general purpose registers (including x30),
 | 1221 | 	 * PMCR_EL0 and the ARMv8.3-PAuth registers.
| 1222 | * Exit EL3 via ERET to a lower exception level. |
| 1223 | * ---------------------------------------------------------- |
| 1224 | */ |
| 1225 | bl restore_gp_pmcr_pauth_regs |
| 1226 | ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR] |
| 1227 | |
Madhukar Pappireddy | fba2572 | 2020-07-24 03:27:12 -0500 | [diff] [blame] | 1228 | #ifdef IMAGE_BL31 |
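 | | 	/* Clear the context flag that marks execution as being in EL3 */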
| 1229 | str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3] |
Jayanth Dodderi Chidanand | 72b69b8 | 2022-01-26 17:14:43 +0000 | [diff] [blame] | 1230 | #endif /* IMAGE_BL31 */ |
| 1231 | |
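 | | 	/*
 | | 	 * Return to the lower exception level. exception_return is
 | | 	 * expected to expand to an ERET, plus any required mitigation
 | | 	 * against speculative execution past the ERET.
 | | 	 */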
Anthony Steinhauser | 0f7e601 | 2020-01-07 15:44:06 -0800 | [diff] [blame] | 1232 | exception_return |
Antonio Nino Diaz | 594811b | 2019-01-31 11:58:00 +0000 | [diff] [blame] | 1233 | |
Yatharth Kochar | 6c0566c | 2015-10-02 17:56:48 +0100 | [diff] [blame] | 1234 | endfunc el3_exit |