/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

	.global	el1_sysregs_context_save
	.global	el1_sysregs_context_restore
#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the EL1 system
 * register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure where the register context will be saved.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_save

	mrs	x9, spsr_el1
	mrs	x10, elr_el1
	stp	x9, x10, [x0, #CTX_SPSR_EL1]

#if !ERRATA_SPECULATIVE_AT
	mrs	x15, sctlr_el1
	mrs	x16, tcr_el1
	stp	x15, x16, [x0, #CTX_SCTLR_EL1]
#endif /* ERRATA_SPECULATIVE_AT */

	mrs	x17, cpacr_el1
	mrs	x9, csselr_el1
	stp	x17, x9, [x0, #CTX_CPACR_EL1]

	mrs	x10, sp_el1
	mrs	x11, esr_el1
	stp	x10, x11, [x0, #CTX_SP_EL1]

	mrs	x12, ttbr0_el1
	mrs	x13, ttbr1_el1
	stp	x12, x13, [x0, #CTX_TTBR0_EL1]

	mrs	x14, mair_el1
	mrs	x15, amair_el1
	stp	x14, x15, [x0, #CTX_MAIR_EL1]

	mrs	x16, actlr_el1
	mrs	x17, tpidr_el1
	stp	x16, x17, [x0, #CTX_ACTLR_EL1]

	mrs	x9, tpidr_el0
	mrs	x10, tpidrro_el0
	stp	x9, x10, [x0, #CTX_TPIDR_EL0]

	mrs	x13, par_el1
	mrs	x14, far_el1
	stp	x13, x14, [x0, #CTX_PAR_EL1]

	mrs	x15, afsr0_el1
	mrs	x16, afsr1_el1
	stp	x15, x16, [x0, #CTX_AFSR0_EL1]

	mrs	x17, contextidr_el1
	mrs	x9, vbar_el1
	stp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]

	/* Save AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, spsr_abt
	mrs	x12, spsr_und
	stp	x11, x12, [x0, #CTX_SPSR_ABT]

	mrs	x13, spsr_irq
	mrs	x14, spsr_fiq
	stp	x13, x14, [x0, #CTX_SPSR_IRQ]

	mrs	x15, dacr32_el2
	mrs	x16, ifsr32_el2
	stp	x15, x16, [x0, #CTX_DACR32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Save NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	mrs	x10, cntp_ctl_el0
	mrs	x11, cntp_cval_el0
	stp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]

	mrs	x12, cntv_ctl_el0
	mrs	x13, cntv_cval_el0
	stp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]

	mrs	x14, cntkctl_el1
	str	x14, [x0, #CTX_CNTKCTL_EL1]
#endif /* NS_TIMER_SWITCH */

	/* Save MTE system registers if the build is configured to do so */
#if ENABLE_FEAT_MTE
#if ENABLE_FEAT_MTE == 2
	/* Runtime check whether FEAT_MTE is implemented on this PE */
	mrs	x8, id_aa64pfr1_el1
	and	x8, x8, #(ID_AA64PFR1_EL1_MTE_MASK << ID_AA64PFR1_EL1_MTE_SHIFT)
	cbz	x8, no_mte_save
#endif
	mrs	x15, TFSRE0_EL1
	mrs	x16, TFSR_EL1
	stp	x15, x16, [x0, #CTX_TFSRE0_EL1]

	mrs	x9, RGSR_EL1
	mrs	x10, GCR_EL1
	stp	x9, x10, [x0, #CTX_RGSR_EL1]

no_mte_save:
#endif /* ENABLE_FEAT_MTE */

	ret
endfunc el1_sysregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the EL1
 * system register context. It assumes that 'x0' points to an
 * 'el1_sys_regs' structure from which the register context will be
 * restored.
 * ------------------------------------------------------------------
 */
func el1_sysregs_context_restore

	ldp	x9, x10, [x0, #CTX_SPSR_EL1]
	msr	spsr_el1, x9
	msr	elr_el1, x10

#if !ERRATA_SPECULATIVE_AT
	ldp	x15, x16, [x0, #CTX_SCTLR_EL1]
	msr	sctlr_el1, x15
	msr	tcr_el1, x16
#endif /* ERRATA_SPECULATIVE_AT */

	ldp	x17, x9, [x0, #CTX_CPACR_EL1]
	msr	cpacr_el1, x17
	msr	csselr_el1, x9

	ldp	x10, x11, [x0, #CTX_SP_EL1]
	msr	sp_el1, x10
	msr	esr_el1, x11

	ldp	x12, x13, [x0, #CTX_TTBR0_EL1]
	msr	ttbr0_el1, x12
	msr	ttbr1_el1, x13

	ldp	x14, x15, [x0, #CTX_MAIR_EL1]
	msr	mair_el1, x14
	msr	amair_el1, x15

	ldp	x16, x17, [x0, #CTX_ACTLR_EL1]
	msr	actlr_el1, x16
	msr	tpidr_el1, x17

	ldp	x9, x10, [x0, #CTX_TPIDR_EL0]
	msr	tpidr_el0, x9
	msr	tpidrro_el0, x10

	ldp	x13, x14, [x0, #CTX_PAR_EL1]
	msr	par_el1, x13
	msr	far_el1, x14

	ldp	x15, x16, [x0, #CTX_AFSR0_EL1]
	msr	afsr0_el1, x15
	msr	afsr1_el1, x16

	ldp	x17, x9, [x0, #CTX_CONTEXTIDR_EL1]
	msr	contextidr_el1, x17
	msr	vbar_el1, x9

	/* Restore AArch32 system registers if the build is configured to do so */
#if CTX_INCLUDE_AARCH32_REGS
	ldp	x11, x12, [x0, #CTX_SPSR_ABT]
	msr	spsr_abt, x11
	msr	spsr_und, x12

	ldp	x13, x14, [x0, #CTX_SPSR_IRQ]
	msr	spsr_irq, x13
	msr	spsr_fiq, x14

	ldp	x15, x16, [x0, #CTX_DACR32_EL2]
	msr	dacr32_el2, x15
	msr	ifsr32_el2, x16
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/* Restore NS timer registers if the build is configured to do so */
#if NS_TIMER_SWITCH
	ldp	x10, x11, [x0, #CTX_CNTP_CTL_EL0]
	msr	cntp_ctl_el0, x10
	msr	cntp_cval_el0, x11

	ldp	x12, x13, [x0, #CTX_CNTV_CTL_EL0]
	msr	cntv_ctl_el0, x12
	msr	cntv_cval_el0, x13

	ldr	x14, [x0, #CTX_CNTKCTL_EL1]
	msr	cntkctl_el1, x14
#endif /* NS_TIMER_SWITCH */

	/* Restore MTE system registers if the build is configured to do so */
#if ENABLE_FEAT_MTE
#if ENABLE_FEAT_MTE == 2
	/* Runtime check whether FEAT_MTE is implemented on this PE */
	mrs	x8, id_aa64pfr1_el1
	and	x8, x8, #(ID_AA64PFR1_EL1_MTE_MASK << ID_AA64PFR1_EL1_MTE_SHIFT)
	cbz	x8, no_mte_restore
#endif

	ldp	x11, x12, [x0, #CTX_TFSRE0_EL1]
	msr	TFSRE0_EL1, x11
	msr	TFSR_EL1, x12

	ldp	x13, x14, [x0, #CTX_RGSR_EL1]
	msr	RGSR_EL1, x13
	msr	GCR_EL1, x14

no_mte_restore:
#endif /* ENABLE_FEAT_MTE */

	/* No explicit ISB required here as ERET covers it */
	ret
endfunc el1_sysregs_context_restore

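/*
 * Minimal usage sketch for the pair above (illustrative only, assuming
 * SP_EL3 points at the current 'cpu_context', so that the EL1 system
 * register area sits at CTX_EL1_SYSREGS_OFFSET from it):
 *
 *	add	x0, sp, #CTX_EL1_SYSREGS_OFFSET
 *	bl	el1_sysregs_context_save
 *	...
 *	add	x0, sp, #CTX_EL1_SYSREGS_OFFSET
 *	bl	el1_sysregs_context_restore
 */
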
/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to save the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure where the register context will be saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS, using
 * x9-x17 (temporary caller-saved registers) to restore the floating
 * point register context. It assumes that 'x0' points to a
 * 'fp_regs' structure from which the register context will be
 * restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB required here as the ERET to
	 * switch to secure EL1 or the non-secure world
	 * covers it
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

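/*
 * Usage sketch for the FP pair above (illustrative only, and only
 * meaningful when CTX_INCLUDE_FPREGS is set; CTX_FPREGS_OFFSET is
 * assumed here to locate the 'fp_regs' area inside the cpu_context
 * that SP_EL3 points to):
 *
 *	add	x0, sp, #CTX_FPREGS_OFFSET
 *	bl	fpregs_context_save
 *	...
 *	add	x0, sp, #CTX_FPREGS_OFFSET
 *	bl	fpregs_context_restore
 */
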
/*
 * Set the SCR_EL3.EA bit to enable SErrors at EL3
 */
.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
.endm

/*
 * Set the PSTATE bits not set when the exception was taken, as
 * described in the AArch64.TakeException() pseudocode function
 * in ARM DDI 0487F.c page J1-7635, to a default value.
 */
.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif
	mov	x8, #DIT_BIT
	msr	DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
.endm /* set_unset_pstate_bits */

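/*
 * For reference, a sketch of what the macro above reduces to when
 * ENABLE_FEAT_DIT == 1 (DIT unconditionally enabled at build time):
 * the runtime ID register check is compiled out and only the
 * unconditional sequence remains:
 *
 *	mov	x8, #DIT_BIT
 *	msr	DIT, x8
 */
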
/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM state and, on the
 * FEAT_STATE_CHECKED path, performs an ID register check to see whether
 * the platform supports the MPAM extension. It restores the MPAM3_EL3
 * register value if the feature is FEAT_STATE_ENABLED or detected as
 * present when FEAT_STATE_CHECKED.
 *
 * This is more involved than for other extensions because MPAM support
 * cannot be determined from the status of a particular bit in the
 * MDCR_EL3 or CPTR_EL3 register; the ID registers must be consulted
 * instead.
 * ------------------------------------------------------------------------
 */

.macro restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM == 2
	/* Runtime check whether the platform implements MPAM */
	mrs	x8, id_aa64pfr0_el1
	lsr	x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
	and	x8, x8, #(ID_AA64PFR0_MPAM_MASK)
	mrs	x7, id_aa64pfr1_el1
	lsr	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
	and	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
	orr	x7, x7, x8
	cbz	x7, no_mpam
#endif
	/* -----------------------------------------------------------
	 * Restore the MPAM3_EL3 register as per the context state.
	 * Currently we only enable MPAM for the NS world and trap to
	 * EL3 for MPAM accesses from lower ELs of the Secure and
	 * Realm worlds.
	 * x9 holds the address of the per_world context.
	 * -----------------------------------------------------------
	 */
	ldr	x17, [x9, #CTX_MPAM3_EL3]
	msr	S3_6_C10_C5_0, x17 /* mpam3_el3 */

no_mpam:
#endif /* ENABLE_FEAT_MPAM */
.endm /* restore_mpam3_el3 */

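/*
 * Likewise, a sketch of the macro when ENABLE_FEAT_MPAM == 1: the ID
 * register check is compiled out and the restore is unconditional.
 * Note that x9 must already hold the per_world context address, as
 * set up by get_per_world_context in el3_exit below:
 *
 *	ldr	x17, [x9, #CTX_MPAM3_EL3]
 *	msr	S3_6_C10_C5_0, x17
 */
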
/* ------------------------------------------------------------------
 * The following macro is used to save and restore all the general
 * purpose and ARMv8.3-PAuth (if enabled) registers.
 * It also saves PMCR_EL0 and sets PMCR_EL0.DP so that PMCCNTR_EL0
 * does not count when event counting is prohibited (ARMv8.5-PMU),
 * which means the cycle counter need not be saved and restored
 * during a world switch.
 *
 * Ideally we would only save and restore the callee-saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	/* Disable cycle counter when event counting is prohibited */
	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets any of the PSTATE bits that are not set by hardware,
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual, to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits not set by the AArch64.TakeException
	 * pseudocode to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

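/*
 * Minimal call-site sketch (illustrative only): an EL3 entry path is
 * expected to run this before any handler code, with SP_EL3 pointing
 * at a valid cpu_context. 'dispatch_handler' is a hypothetical label
 * standing in for the real exception handling code:
 *
 *	bl	prepare_el3_entry
 *	bl	dispatch_handler
 *	b	el3_exit
 */
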
/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In the ERRATA_SPECULATIVE_AT case, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers so that the stage 1 and
 * stage 2 page table walks for lower ELs are disabled.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * the page table walk for lower ELs (EL1 and EL0). The first
	 * step disables the stage 1 page table walk; the second
	 * forces the page table walker to use the TCR_EL1.EPDx bits
	 * for address translation. The ISB ensures the CPU performs
	 * the two steps in order.
	 *
	 * 1. Set the TCR_EL1.EPDx bits to disable the stage 1 page
	 *    table walk.
	 * 2. Set the MMU enable bit to avoid identity mapping via
	 *    stage 2 and force the page table walker to honour the
	 *    TCR_EL1.EPDx bits.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs

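/*
 * Illustrative pairing (a sketch, not a definitive call sequence):
 * the values saved above are put back on the exit path by the
 * companion 'restore_ptw_el1_sys_regs' macro, assumed here to come
 * from el3_common_macros.S included above, which el3_exit below
 * invokes before restoring the general purpose registers. Note that
 * this function clobbers x29:
 *
 *	bl	save_and_update_ptw_el1_sys_regs	<- EL3 entry path
 *	...
 *	restore_ptw_el1_sys_regs			<- EL3 exit path
 */
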
/* -----------------------------------------------------------------
 * The macro below returns the address of the per_world context for
 * the security state, which is retrieved through the
 * "get_security_state" macro. The per_world context address is
 * returned in the given register argument.
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */

.macro get_per_world_context _reg:req
	ldr	x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	get_security_state x9, x10
	mov_imm	x10, (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
	mul	x9, x9, x10
	adrp	x10, per_world_context
	add	x10, x10, :lo12:per_world_context
	add	x9, x9, x10
	mov	\_reg, x9
.endm

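/*
 * In effect the macro computes, with 'state' being the security state
 * index derived from SCR_EL3 and the stride being the size of one
 * per-world entry:
 *
 *	_reg = per_world_context
 *	       + state * (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
 *
 * A typical use, as in el3_exit below:
 *
 *	get_per_world_context x9
 *	ldp	x19, x20, [x9, #CTX_CPTR_EL3]
 */
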
/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from which the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0 i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */

	/* The address of the per_world context is stored in x9 */
	get_per_world_context x9

	ldp	x19, x20, [x9, #CTX_CPTR_EL3]
	msr	cptr_el3, x19

#if IMAGE_BL31
	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:

	restore_mpam3_el3

#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31
	synchronize_errors
#endif /* IMAGE_BL31 */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Clear the EL3 flag as we are exiting EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit