/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_FPREGS
	.global	fpregs_context_save
	.global	fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
	.global	prepare_el3_entry
	.global	restore_gp_pmcr_pauth_regs
	.global	save_and_update_ptw_el1_sys_regs
	.global	el3_exit

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using only x9-x17 (the temporary caller-saved registers) to save
 * the floating point register context. It assumes that 'x0' points
 * to a 'fp_regs' structure where the register context will be
 * saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
	stp	q0, q1, [x0, #CTX_FP_Q0]
	stp	q2, q3, [x0, #CTX_FP_Q2]
	stp	q4, q5, [x0, #CTX_FP_Q4]
	stp	q6, q7, [x0, #CTX_FP_Q6]
	stp	q8, q9, [x0, #CTX_FP_Q8]
	stp	q10, q11, [x0, #CTX_FP_Q10]
	stp	q12, q13, [x0, #CTX_FP_Q12]
	stp	q14, q15, [x0, #CTX_FP_Q14]
	stp	q16, q17, [x0, #CTX_FP_Q16]
	stp	q18, q19, [x0, #CTX_FP_Q18]
	stp	q20, q21, [x0, #CTX_FP_Q20]
	stp	q22, q23, [x0, #CTX_FP_Q22]
	stp	q24, q25, [x0, #CTX_FP_Q24]
	stp	q26, q27, [x0, #CTX_FP_Q26]
	stp	q28, q29, [x0, #CTX_FP_Q28]
	stp	q30, q31, [x0, #CTX_FP_Q30]

	mrs	x9, fpsr
	str	x9, [x0, #CTX_FP_FPSR]

	mrs	x10, fpcr
	str	x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
	mrs	x11, fpexc32_el2
	str	x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
	ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (aapcs64),
 * using only x9-x17 (the temporary caller-saved registers) to
 * restore the floating point register context. It assumes that 'x0'
 * points to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses VFP registers
 * nor sets this trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in secure world
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
	ldp	q0, q1, [x0, #CTX_FP_Q0]
	ldp	q2, q3, [x0, #CTX_FP_Q2]
	ldp	q4, q5, [x0, #CTX_FP_Q4]
	ldp	q6, q7, [x0, #CTX_FP_Q6]
	ldp	q8, q9, [x0, #CTX_FP_Q8]
	ldp	q10, q11, [x0, #CTX_FP_Q10]
	ldp	q12, q13, [x0, #CTX_FP_Q12]
	ldp	q14, q15, [x0, #CTX_FP_Q14]
	ldp	q16, q17, [x0, #CTX_FP_Q16]
	ldp	q18, q19, [x0, #CTX_FP_Q18]
	ldp	q20, q21, [x0, #CTX_FP_Q20]
	ldp	q22, q23, [x0, #CTX_FP_Q22]
	ldp	q24, q25, [x0, #CTX_FP_Q24]
	ldp	q26, q27, [x0, #CTX_FP_Q26]
	ldp	q28, q29, [x0, #CTX_FP_Q28]
	ldp	q30, q31, [x0, #CTX_FP_Q30]

	ldr	x9, [x0, #CTX_FP_FPSR]
	msr	fpsr, x9

	ldr	x10, [x0, #CTX_FP_FPCR]
	msr	fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
	ldr	x11, [x0, #CTX_FP_FPEXC32_EL2]
	msr	fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

	/*
	 * No explicit ISB is required here, as the ERET used to
	 * switch to secure EL1 or the non-secure world covers it.
	 */

	ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */
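
/*
 * A minimal usage sketch, stated as an assumption for illustration
 * (the real call sites live in the C context-management code, not in
 * this file):
 *
 *	cpu_context_t *ctx = cm_get_context(security_state);
 *	fpregs_context_save(get_fpregs_ctx(ctx));
 *	...
 *	fpregs_context_restore(get_fpregs_ctx(ctx));
 */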

/*
 * Set the SCR_EL3.EA bit so that External Aborts and SError
 * interrupts are routed to, and can be handled at, EL3.
 */
.macro enable_serror_at_el3
	mrs	x8, scr_el3
	orr	x8, x8, #SCR_EA_BIT
	msr	scr_el3, x8
.endm
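
/*
 * Note that el3_exit restores SCR_EL3 from the saved context before
 * ERET, so the EA bit set by enable_serror_at_el3 only takes effect
 * while execution remains in EL3.
 */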

/*
 * Set, to their default values, the PSTATE bits that are not set by
 * hardware when an exception is taken, as described by the
 * AArch64.TakeException() pseudocode function in Arm DDI 0487F.c,
 * page J1-7635.
 */
.macro set_unset_pstate_bits
	/*
	 * If Data Independent Timing (DIT) functionality is implemented,
	 * always enable DIT in EL3.
	 */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
	mrs	x8, id_aa64pfr0_el1
	and	x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
	cbz	x8, 1f
#endif
	mov	x8, #DIT_BIT
	msr	DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
.endm /* set_unset_pstate_bits */
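
/*
 * Build options such as ENABLE_FEAT_DIT follow the usual TF-A
 * feature-state convention: 0 means the feature is disabled, 1 means
 * it is unconditionally enabled, and 2 means its presence must be
 * confirmed from the ID registers at runtime, as in the check above.
 */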

/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM build state and, when it is
 * FEAT_STATE_CHECKED, reads the ID registers to see whether the platform
 * implements the MPAM extension; the MPAM3_EL3 register value is restored
 * if the feature is FEAT_STATE_ENABLED or detected at runtime.
 *
 * This is more involved than for other extensions because MPAM support
 * cannot be inferred from the state of a particular bit in the MDCR_EL3
 * or CPTR_EL3 register.
 * ------------------------------------------------------------------------
 */

.macro restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM == 2

	mrs	x8, id_aa64pfr0_el1
	lsr	x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
	and	x8, x8, #(ID_AA64PFR0_MPAM_MASK)
	mrs	x7, id_aa64pfr1_el1
	lsr	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
	and	x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
	orr	x7, x7, x8
	cbz	x7, no_mpam
#endif
	/* -----------------------------------------------------------
	 * Restore the MPAM3_EL3 register from the context state.
	 * Currently MPAM is enabled for the NS world only; MPAM
	 * accesses from lower ELs of the Secure and Realm worlds trap
	 * to EL3.
	 * x9 holds the address of the per_world context.
	 * -----------------------------------------------------------
	 */

	ldr	x17, [x9, #CTX_MPAM3_EL3]
	msr	S3_6_C10_C5_0, x17 /* mpam3_el3 */

no_mpam:
#endif
.endm /* restore_mpam3_el3 */
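
/*
 * restore_mpam3_el3 relies on x9 already holding the per_world
 * context address (computed by the get_per_world_context macro
 * defined later in this file); it is invoked from el3_exit after
 * that address has been established.
 */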

/* ------------------------------------------------------------------
 * The following macro is used to save all the general purpose and
 * ARMv8.3-PAuth (if enabled) registers.
 * It also sets PMCR_EL0.DP so that the Secure Cycle Counter
 * (PMCCNTR_EL0) stops counting while event counting is prohibited
 * (ARMv8.5-PMU); PMCCNTR_EL0 then need not be saved/restored on a
 * world switch.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
.macro save_gp_pmcr_pauth_regs
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	stp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	stp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	stp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	stp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	stp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	stp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	stp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	stp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	stp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	stp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	stp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	stp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	stp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	mrs	x18, sp_el0
	str	x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]

	/* PMUv3 is presumed to be always present */
	mrs	x9, pmcr_el0
	str	x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	/* Disable cycle counter when event counting is prohibited */
	orr	x9, x9, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x9
	isb
#if CTX_INCLUDE_PAUTH_REGS
	/* ----------------------------------------------------------
	 * Save the ARMv8.3-PAuth keys as they are not banked
	 * by exception level
	 * ----------------------------------------------------------
	 */
	add	x19, sp, #CTX_PAUTH_REGS_OFFSET

	mrs	x20, APIAKeyLo_EL1	/* x21:x20 = APIAKey */
	mrs	x21, APIAKeyHi_EL1
	mrs	x22, APIBKeyLo_EL1	/* x23:x22 = APIBKey */
	mrs	x23, APIBKeyHi_EL1
	mrs	x24, APDAKeyLo_EL1	/* x25:x24 = APDAKey */
	mrs	x25, APDAKeyHi_EL1
	mrs	x26, APDBKeyLo_EL1	/* x27:x26 = APDBKey */
	mrs	x27, APDBKeyHi_EL1
	mrs	x28, APGAKeyLo_EL1	/* x29:x28 = APGAKey */
	mrs	x29, APGAKeyHi_EL1

	stp	x20, x21, [x19, #CTX_PACIAKEY_LO]
	stp	x22, x23, [x19, #CTX_PACIBKEY_LO]
	stp	x24, x25, [x19, #CTX_PACDAKEY_LO]
	stp	x26, x27, [x19, #CTX_PACDBKEY_LO]
	stp	x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets any of the PSTATE bits that are not set by hardware,
 * according to the AArch64.TakeException pseudocode in the Arm
 * Architecture Reference Manual, to a default value for EL3.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
	save_gp_pmcr_pauth_regs
	enable_serror_at_el3
	/*
	 * Set the PSTATE bits that the AArch64.TakeException
	 * pseudocode leaves unset to their default values.
	 */
	set_unset_pstate_bits
	ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * The x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
	/* Restore the ARMv8.3 PAuth keys */
	add	x10, sp, #CTX_PAUTH_REGS_OFFSET

	ldp	x0, x1, [x10, #CTX_PACIAKEY_LO]	/* x1:x0 = APIAKey */
	ldp	x2, x3, [x10, #CTX_PACIBKEY_LO]	/* x3:x2 = APIBKey */
	ldp	x4, x5, [x10, #CTX_PACDAKEY_LO]	/* x5:x4 = APDAKey */
	ldp	x6, x7, [x10, #CTX_PACDBKEY_LO]	/* x7:x6 = APDBKey */
	ldp	x8, x9, [x10, #CTX_PACGAKEY_LO]	/* x9:x8 = APGAKey */

	msr	APIAKeyLo_EL1, x0
	msr	APIAKeyHi_EL1, x1
	msr	APIBKeyLo_EL1, x2
	msr	APIBKeyHi_EL1, x3
	msr	APDAKeyLo_EL1, x4
	msr	APDAKeyHi_EL1, x5
	msr	APDBKeyLo_EL1, x6
	msr	APDBKeyHi_EL1, x7
	msr	APGAKeyLo_EL1, x8
	msr	APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

	/* PMUv3 is presumed to be always present */
	ldr	x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
	msr	pmcr_el0, x0
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	ldp	x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
	ldp	x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
	ldp	x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
	ldp	x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
	ldp	x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
	ldp	x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
	ldp	x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
	ldp	x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
	ldp	x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
	ldp	x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
	ldp	x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
	ldp	x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
	ldr	x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
	msr	sp_el0, x28
	ldp	x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
	ret
endfunc restore_gp_pmcr_pauth_regs

/*
 * In the ERRATA_SPECULATIVE_AT case, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable stage 1 and
 * stage 2 page table walks.
 */
func save_and_update_ptw_el1_sys_regs
	/* ----------------------------------------------------------
	 * Save only the sctlr_el1 and tcr_el1 registers
	 * ----------------------------------------------------------
	 */
	mrs	x29, sctlr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_SCTLR_EL1)]
	mrs	x29, tcr_el1
	str	x29, [sp, #(CTX_EL1_SYSREGS_OFFSET + CTX_TCR_EL1)]

	/* ------------------------------------------------------------
	 * The steps below must be performed in this order to disable
	 * page table walks for the lower ELs (EL1 and EL0). The first
	 * step disables stage 1 page table walks, and the second step
	 * forces the page table walker to honour the TCR_EL1.EPDx
	 * bits when performing address translation. The ISB ensures
	 * the CPU performs the two steps in order.
	 *
	 * 1. Set the TCR_EL1.EPDx bits to disable stage 1 page table
	 *    walks.
	 * 2. Set the MMU enable bit to avoid an identity mapping via
	 *    stage 2, forcing TCR_EL1.EPDx to be used by the page
	 *    table walker.
	 * ------------------------------------------------------------
	 */
	orr	x29, x29, #(TCR_EPD0_BIT)
	orr	x29, x29, #(TCR_EPD1_BIT)
	msr	tcr_el1, x29
	isb
	mrs	x29, sctlr_el1
	orr	x29, x29, #SCTLR_M_BIT
	msr	sctlr_el1, x29
	isb

	ret
endfunc save_and_update_ptw_el1_sys_regs
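
/*
 * Background, offered as context rather than taken from this file:
 * ERRATA_SPECULATIVE_AT is understood to work around CPU errata
 * (e.g. Cortex-A76 erratum #1165522) in which speculative AT
 * instructions can allocate TLB entries from stale EL1 translation
 * regime state. Parking the EL1 regime as above, and restoring the
 * saved SCTLR_EL1/TCR_EL1 values via restore_ptw_el1_sys_regs on
 * the el3_exit path, keeps the regime in a safe state while in EL3.
 */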

/* -----------------------------------------------------------------
 * The following macro returns the address of the per_world context
 * for the security state, which is retrieved through the
 * "get_security_state" macro. The per_world context address is
 * returned in the register argument.
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */

.macro get_per_world_context _reg:req
	ldr	x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	get_security_state x9, x10
	mov_imm	x10, (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
	mul	x9, x9, x10
	adrp	x10, per_world_context
	add	x10, x10, :lo12:per_world_context
	add	x9, x9, x10
	mov	\_reg, x9
.endm
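
/*
 * The computation above is equivalent to the following C expression
 * (an illustrative sketch only):
 *
 *	_reg = (uintptr_t)per_world_context +
 *		security_state * (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3);
 *
 * i.e. an index into the per_world_context array using the
 * per-world context size as the stride.
 */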

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from which the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
	/* el3_exit assumes SP_EL0 on entry */
	mrs	x17, spsel
	cmp	x17, #MODE_SP_EL0
	ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

	/* ----------------------------------------------------------
	 * Save the current SP_EL0, i.e. the EL3 runtime stack, which
	 * will be used for handling the next SMC.
	 * Then switch to SP_EL3.
	 * ----------------------------------------------------------
	 */
	mov	x17, sp
	msr	spsel, #MODE_SP_ELX
	str	x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

	/* ----------------------------------------------------------
	 * Restore CPTR_EL3.
	 * ZCR is only restored if SVE is supported and enabled.
	 * Synchronization is required before zcr_el3 is addressed.
	 * ----------------------------------------------------------
	 */

	/* The address of the per_world context is stored in x9 */
	get_per_world_context x9

	ldp	x19, x20, [x9, #CTX_CPTR_EL3]
	msr	cptr_el3, x19

#if IMAGE_BL31
	ands	x19, x19, #CPTR_EZ_BIT
	beq	sve_not_enabled

	isb
	msr	S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:

	restore_mpam3_el3

#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
	/* ----------------------------------------------------------
	 * Restore mitigation state as it was on entry to EL3
	 * ----------------------------------------------------------
	 */
	ldr	x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
	cbz	x17, 1f
	blr	x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31
	synchronize_errors
#endif /* IMAGE_BL31 */

	/* ----------------------------------------------------------
	 * Restore SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
	 * ----------------------------------------------------------
	 */
	ldr	x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
	ldp	x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
	msr	scr_el3, x18
	msr	spsr_el3, x16
	msr	elr_el3, x17

	restore_ptw_el1_sys_regs

	/* ----------------------------------------------------------
	 * Restore general purpose (including x30), PMCR_EL0 and
	 * ARMv8.3-PAuth registers.
	 * Exit EL3 via ERET to a lower exception level.
	 * ----------------------------------------------------------
	 */
	bl	restore_gp_pmcr_pauth_regs
	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
	/* Clear the EL3 flag as we are exiting EL3 */
	str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

	exception_return

endfunc el3_exit