Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 1 | /* |
Jayanth Dodderi Chidanand | 72b69b8 | 2022-01-26 17:14:43 +0000 | [diff] [blame] | 2 | * Copyright (c) 2013-2022, Arm Limited and Contributors. All rights reserved. |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 3 | * |
dp-arm | fa3cf0b | 2017-05-03 09:38:09 +0100 | [diff] [blame] | 4 | * SPDX-License-Identifier: BSD-3-Clause |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 5 | */ |
| 6 | |
Antonio Nino Diaz | 864ca6f | 2018-10-31 15:25:35 +0000 | [diff] [blame] | 7 | #ifndef CONTEXT_H |
| 8 | #define CONTEXT_H |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 9 | |
Antonio Nino Diaz | e0f9063 | 2018-12-14 00:18:21 +0000 | [diff] [blame] | 10 | #include <lib/utils_def.h> |
Jeenu Viswambharan | 96c7df0 | 2017-11-30 12:54:15 +0000 | [diff] [blame] | 11 | |
/*******************************************************************************
 * Constants that allow assembler code to access members of and the 'gp_regs'
 * structure at their correct offsets. Each slot is 8 bytes (one AArch64 GPR);
 * these values must match the layout of the 'gp_regs' structure below.
 ******************************************************************************/
#define CTX_GPREGS_OFFSET	U(0x0)
#define CTX_GPREG_X0		U(0x0)
#define CTX_GPREG_X1		U(0x8)
#define CTX_GPREG_X2		U(0x10)
#define CTX_GPREG_X3		U(0x18)
#define CTX_GPREG_X4		U(0x20)
#define CTX_GPREG_X5		U(0x28)
#define CTX_GPREG_X6		U(0x30)
#define CTX_GPREG_X7		U(0x38)
#define CTX_GPREG_X8		U(0x40)
#define CTX_GPREG_X9		U(0x48)
#define CTX_GPREG_X10		U(0x50)
#define CTX_GPREG_X11		U(0x58)
#define CTX_GPREG_X12		U(0x60)
#define CTX_GPREG_X13		U(0x68)
#define CTX_GPREG_X14		U(0x70)
#define CTX_GPREG_X15		U(0x78)
#define CTX_GPREG_X16		U(0x80)
#define CTX_GPREG_X17		U(0x88)
#define CTX_GPREG_X18		U(0x90)
#define CTX_GPREG_X19		U(0x98)
#define CTX_GPREG_X20		U(0xa0)
#define CTX_GPREG_X21		U(0xa8)
#define CTX_GPREG_X22		U(0xb0)
#define CTX_GPREG_X23		U(0xb8)
#define CTX_GPREG_X24		U(0xc0)
#define CTX_GPREG_X25		U(0xc8)
#define CTX_GPREG_X26		U(0xd0)
#define CTX_GPREG_X27		U(0xd8)
#define CTX_GPREG_X28		U(0xe0)
#define CTX_GPREG_X29		U(0xe8)
#define CTX_GPREG_LR		U(0xf0)	/* x30 */
#define CTX_GPREG_SP_EL0	U(0xf8)
#define CTX_GPREGS_END		U(0x100)
/*******************************************************************************
 * Constants that allow assembler code to access members of and the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are only
 * 32-bits wide but are stored as 64-bit values for convenience
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3		U(0x0)
#define CTX_ESR_EL3		U(0x8)
#define CTX_RUNTIME_SP		U(0x10)
#define CTX_SPSR_EL3		U(0x18)
#define CTX_ELR_EL3		U(0x20)
#define CTX_PMCR_EL0		U(0x28)
#define CTX_IS_IN_EL3		U(0x30)	/* flag: core is currently executing in EL3 */
#define CTX_CPTR_EL3		U(0x38)
#define CTX_ZCR_EL3		U(0x40)
#define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 67 | |
| 68 | /******************************************************************************* |
| 69 | * Constants that allow assembler code to access members of and the |
| 70 | * 'el1_sys_regs' structure at their correct offsets. Note that some of the |
| 71 | * registers are only 32-bits wide but are stored as 64-bit values for |
| 72 | * convenience |
| 73 | ******************************************************************************/ |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 74 | #define CTX_EL1_SYSREGS_OFFSET (CTX_EL3STATE_OFFSET + CTX_EL3STATE_END) |
Varun Wadekar | c6a11f6 | 2017-05-25 18:04:48 -0700 | [diff] [blame] | 75 | #define CTX_SPSR_EL1 U(0x0) |
| 76 | #define CTX_ELR_EL1 U(0x8) |
| 77 | #define CTX_SCTLR_EL1 U(0x10) |
Manish V Badarkhe | 2b0ee97 | 2020-07-28 07:22:30 +0100 | [diff] [blame] | 78 | #define CTX_TCR_EL1 U(0x18) |
Varun Wadekar | c6a11f6 | 2017-05-25 18:04:48 -0700 | [diff] [blame] | 79 | #define CTX_CPACR_EL1 U(0x20) |
| 80 | #define CTX_CSSELR_EL1 U(0x28) |
| 81 | #define CTX_SP_EL1 U(0x30) |
| 82 | #define CTX_ESR_EL1 U(0x38) |
| 83 | #define CTX_TTBR0_EL1 U(0x40) |
| 84 | #define CTX_TTBR1_EL1 U(0x48) |
| 85 | #define CTX_MAIR_EL1 U(0x50) |
| 86 | #define CTX_AMAIR_EL1 U(0x58) |
Manish V Badarkhe | 2b0ee97 | 2020-07-28 07:22:30 +0100 | [diff] [blame] | 87 | #define CTX_ACTLR_EL1 U(0x60) |
Varun Wadekar | c6a11f6 | 2017-05-25 18:04:48 -0700 | [diff] [blame] | 88 | #define CTX_TPIDR_EL1 U(0x68) |
| 89 | #define CTX_TPIDR_EL0 U(0x70) |
| 90 | #define CTX_TPIDRRO_EL0 U(0x78) |
| 91 | #define CTX_PAR_EL1 U(0x80) |
| 92 | #define CTX_FAR_EL1 U(0x88) |
| 93 | #define CTX_AFSR0_EL1 U(0x90) |
| 94 | #define CTX_AFSR1_EL1 U(0x98) |
| 95 | #define CTX_CONTEXTIDR_EL1 U(0xa0) |
| 96 | #define CTX_VBAR_EL1 U(0xa8) |
Soby Mathew | d75d2ba | 2016-05-17 14:01:32 +0100 | [diff] [blame] | 97 | |
| 98 | /* |
| 99 | * If the platform is AArch64-only, there is no need to save and restore these |
| 100 | * AArch32 registers. |
| 101 | */ |
| 102 | #if CTX_INCLUDE_AARCH32_REGS |
Alexei Fedorov | 503bbf3 | 2019-08-13 15:17:53 +0100 | [diff] [blame] | 103 | #define CTX_SPSR_ABT U(0xb0) /* Align to the next 16 byte boundary */ |
| 104 | #define CTX_SPSR_UND U(0xb8) |
| 105 | #define CTX_SPSR_IRQ U(0xc0) |
| 106 | #define CTX_SPSR_FIQ U(0xc8) |
| 107 | #define CTX_DACR32_EL2 U(0xd0) |
| 108 | #define CTX_IFSR32_EL2 U(0xd8) |
| 109 | #define CTX_AARCH32_END U(0xe0) /* Align to the next 16 byte boundary */ |
Soby Mathew | d75d2ba | 2016-05-17 14:01:32 +0100 | [diff] [blame] | 110 | #else |
Alexei Fedorov | 503bbf3 | 2019-08-13 15:17:53 +0100 | [diff] [blame] | 111 | #define CTX_AARCH32_END U(0xb0) /* Align to the next 16 byte boundary */ |
Antonio Nino Diaz | 13adfb1 | 2019-01-30 20:41:31 +0000 | [diff] [blame] | 112 | #endif /* CTX_INCLUDE_AARCH32_REGS */ |
Soby Mathew | d75d2ba | 2016-05-17 14:01:32 +0100 | [diff] [blame] | 113 | |
Jeenu Viswambharan | d1b6015 | 2014-05-12 15:28:47 +0100 | [diff] [blame] | 114 | /* |
| 115 | * If the timer registers aren't saved and restored, we don't have to reserve |
| 116 | * space for them in the context |
| 117 | */ |
| 118 | #if NS_TIMER_SWITCH |
Antonio Nino Diaz | 13adfb1 | 2019-01-30 20:41:31 +0000 | [diff] [blame] | 119 | #define CTX_CNTP_CTL_EL0 (CTX_AARCH32_END + U(0x0)) |
| 120 | #define CTX_CNTP_CVAL_EL0 (CTX_AARCH32_END + U(0x8)) |
| 121 | #define CTX_CNTV_CTL_EL0 (CTX_AARCH32_END + U(0x10)) |
| 122 | #define CTX_CNTV_CVAL_EL0 (CTX_AARCH32_END + U(0x18)) |
| 123 | #define CTX_CNTKCTL_EL1 (CTX_AARCH32_END + U(0x20)) |
| 124 | #define CTX_TIMER_SYSREGS_END (CTX_AARCH32_END + U(0x30)) /* Align to the next 16 byte boundary */ |
Jeenu Viswambharan | d1b6015 | 2014-05-12 15:28:47 +0100 | [diff] [blame] | 125 | #else |
Antonio Nino Diaz | 13adfb1 | 2019-01-30 20:41:31 +0000 | [diff] [blame] | 126 | #define CTX_TIMER_SYSREGS_END CTX_AARCH32_END |
| 127 | #endif /* NS_TIMER_SWITCH */ |
| 128 | |
Justin Chadwell | 1c7c13a | 2019-07-18 14:25:33 +0100 | [diff] [blame] | 129 | #if CTX_INCLUDE_MTE_REGS |
| 130 | #define CTX_TFSRE0_EL1 (CTX_TIMER_SYSREGS_END + U(0x0)) |
| 131 | #define CTX_TFSR_EL1 (CTX_TIMER_SYSREGS_END + U(0x8)) |
| 132 | #define CTX_RGSR_EL1 (CTX_TIMER_SYSREGS_END + U(0x10)) |
| 133 | #define CTX_GCR_EL1 (CTX_TIMER_SYSREGS_END + U(0x18)) |
| 134 | |
| 135 | /* Align to the next 16 byte boundary */ |
| 136 | #define CTX_MTE_REGS_END (CTX_TIMER_SYSREGS_END + U(0x20)) |
| 137 | #else |
| 138 | #define CTX_MTE_REGS_END CTX_TIMER_SYSREGS_END |
| 139 | #endif /* CTX_INCLUDE_MTE_REGS */ |
| 140 | |
Antonio Nino Diaz | 13adfb1 | 2019-01-30 20:41:31 +0000 | [diff] [blame] | 141 | /* |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 142 | * End of system registers. |
| 143 | */ |
| 144 | #define CTX_EL1_SYSREGS_END CTX_MTE_REGS_END |
| 145 | |
| 146 | /* |
| 147 | * EL2 register set |
Max Shvetsov | bdf502d | 2020-02-25 13:56:19 +0000 | [diff] [blame] | 148 | */ |
| 149 | |
| 150 | #if CTX_INCLUDE_EL2_REGS |
| 151 | /* For later discussion |
| 152 | * ICH_AP0R<n>_EL2 |
| 153 | * ICH_AP1R<n>_EL2 |
| 154 | * AMEVCNTVOFF0<n>_EL2 |
| 155 | * AMEVCNTVOFF1<n>_EL2 |
| 156 | * ICH_LR<n>_EL2 |
| 157 | */ |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 158 | #define CTX_EL2_SYSREGS_OFFSET (CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END) |
| 159 | |
| 160 | #define CTX_ACTLR_EL2 U(0x0) |
| 161 | #define CTX_AFSR0_EL2 U(0x8) |
| 162 | #define CTX_AFSR1_EL2 U(0x10) |
| 163 | #define CTX_AMAIR_EL2 U(0x18) |
| 164 | #define CTX_CNTHCTL_EL2 U(0x20) |
Max Shvetsov | cf784f7 | 2021-03-31 19:00:38 +0100 | [diff] [blame] | 165 | #define CTX_CNTVOFF_EL2 U(0x28) |
| 166 | #define CTX_CPTR_EL2 U(0x30) |
| 167 | #define CTX_DBGVCR32_EL2 U(0x38) |
| 168 | #define CTX_ELR_EL2 U(0x40) |
| 169 | #define CTX_ESR_EL2 U(0x48) |
| 170 | #define CTX_FAR_EL2 U(0x50) |
| 171 | #define CTX_HACR_EL2 U(0x58) |
| 172 | #define CTX_HCR_EL2 U(0x60) |
| 173 | #define CTX_HPFAR_EL2 U(0x68) |
| 174 | #define CTX_HSTR_EL2 U(0x70) |
| 175 | #define CTX_ICC_SRE_EL2 U(0x78) |
| 176 | #define CTX_ICH_HCR_EL2 U(0x80) |
| 177 | #define CTX_ICH_VMCR_EL2 U(0x88) |
| 178 | #define CTX_MAIR_EL2 U(0x90) |
| 179 | #define CTX_MDCR_EL2 U(0x98) |
| 180 | #define CTX_PMSCR_EL2 U(0xa0) |
| 181 | #define CTX_SCTLR_EL2 U(0xa8) |
| 182 | #define CTX_SPSR_EL2 U(0xb0) |
| 183 | #define CTX_SP_EL2 U(0xb8) |
| 184 | #define CTX_TCR_EL2 U(0xc0) |
| 185 | #define CTX_TPIDR_EL2 U(0xc8) |
| 186 | #define CTX_TTBR0_EL2 U(0xd0) |
| 187 | #define CTX_VBAR_EL2 U(0xd8) |
| 188 | #define CTX_VMPIDR_EL2 U(0xe0) |
| 189 | #define CTX_VPIDR_EL2 U(0xe8) |
| 190 | #define CTX_VTCR_EL2 U(0xf0) |
| 191 | #define CTX_VTTBR_EL2 U(0xf8) |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 192 | |
| 193 | // Only if MTE registers in use |
Max Shvetsov | cf784f7 | 2021-03-31 19:00:38 +0100 | [diff] [blame] | 194 | #define CTX_TFSR_EL2 U(0x100) |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 195 | |
| 196 | // Only if ENABLE_MPAM_FOR_LOWER_ELS==1 |
Max Shvetsov | cf784f7 | 2021-03-31 19:00:38 +0100 | [diff] [blame] | 197 | #define CTX_MPAM2_EL2 U(0x108) |
| 198 | #define CTX_MPAMHCR_EL2 U(0x110) |
| 199 | #define CTX_MPAMVPM0_EL2 U(0x118) |
| 200 | #define CTX_MPAMVPM1_EL2 U(0x120) |
| 201 | #define CTX_MPAMVPM2_EL2 U(0x128) |
| 202 | #define CTX_MPAMVPM3_EL2 U(0x130) |
| 203 | #define CTX_MPAMVPM4_EL2 U(0x138) |
| 204 | #define CTX_MPAMVPM5_EL2 U(0x140) |
| 205 | #define CTX_MPAMVPM6_EL2 U(0x148) |
| 206 | #define CTX_MPAMVPM7_EL2 U(0x150) |
| 207 | #define CTX_MPAMVPMV_EL2 U(0x158) |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 208 | |
| 209 | // Starting with Armv8.6 |
Jayanth Dodderi Chidanand | 13ae0f4 | 2021-11-25 14:59:30 +0000 | [diff] [blame] | 210 | #define CTX_HDFGRTR_EL2 U(0x160) |
| 211 | #define CTX_HAFGRTR_EL2 U(0x168) |
Max Shvetsov | cf784f7 | 2021-03-31 19:00:38 +0100 | [diff] [blame] | 212 | #define CTX_HDFGWTR_EL2 U(0x170) |
| 213 | #define CTX_HFGITR_EL2 U(0x178) |
| 214 | #define CTX_HFGRTR_EL2 U(0x180) |
| 215 | #define CTX_HFGWTR_EL2 U(0x188) |
| 216 | #define CTX_CNTPOFF_EL2 U(0x190) |
Max Shvetsov | bdf502d | 2020-02-25 13:56:19 +0000 | [diff] [blame] | 217 | |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 218 | // Starting with Armv8.4 |
Max Shvetsov | cf784f7 | 2021-03-31 19:00:38 +0100 | [diff] [blame] | 219 | #define CTX_CONTEXTIDR_EL2 U(0x198) |
Jayanth Dodderi Chidanand | 72b69b8 | 2022-01-26 17:14:43 +0000 | [diff] [blame] | 220 | #define CTX_TTBR1_EL2 U(0x1a0) |
| 221 | #define CTX_VDISR_EL2 U(0x1a8) |
| 222 | #define CTX_VSESR_EL2 U(0x1b0) |
Zelalem Aweke | bd17eae | 2021-11-03 13:31:53 -0500 | [diff] [blame] | 223 | #define CTX_VNCR_EL2 U(0x1b8) |
| 224 | #define CTX_TRFCR_EL2 U(0x1c0) |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 225 | |
| 226 | // Starting with Armv8.5 |
Zelalem Aweke | bd17eae | 2021-11-03 13:31:53 -0500 | [diff] [blame] | 227 | #define CTX_SCXTNUM_EL2 U(0x1c8) |
johpow01 | f91e59f | 2021-08-04 19:38:18 -0500 | [diff] [blame] | 228 | |
| 229 | // Register for FEAT_HCX |
Zelalem Aweke | bd17eae | 2021-11-03 13:31:53 -0500 | [diff] [blame] | 230 | #define CTX_HCRX_EL2 U(0x1d0) |
johpow01 | f91e59f | 2021-08-04 19:38:18 -0500 | [diff] [blame] | 231 | |
Max Shvetsov | bdf502d | 2020-02-25 13:56:19 +0000 | [diff] [blame] | 232 | /* Align to the next 16 byte boundary */ |
Zelalem Aweke | bd17eae | 2021-11-03 13:31:53 -0500 | [diff] [blame] | 233 | #define CTX_EL2_SYSREGS_END U(0x1e0) |
Olivier Deprez | 1962891 | 2020-03-20 14:22:05 +0100 | [diff] [blame] | 234 | |
Max Shvetsov | bdf502d | 2020-02-25 13:56:19 +0000 | [diff] [blame] | 235 | #endif /* CTX_INCLUDE_EL2_REGS */ |
| 236 | |
/*******************************************************************************
 * Constants that allow assembler code to access members of and the 'fp_regs'
 * structure at their correct offsets. Each Q register occupies 16 bytes.
 ******************************************************************************/
/* The FP context follows whichever sysreg context is last in the layout. */
#if CTX_INCLUDE_EL2_REGS
# define CTX_FPREGS_OFFSET	(CTX_EL2_SYSREGS_OFFSET + CTX_EL2_SYSREGS_END)
#else
# define CTX_FPREGS_OFFSET	(CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
#endif
#if CTX_INCLUDE_FPREGS
#define CTX_FP_Q0		U(0x0)
#define CTX_FP_Q1		U(0x10)
#define CTX_FP_Q2		U(0x20)
#define CTX_FP_Q3		U(0x30)
#define CTX_FP_Q4		U(0x40)
#define CTX_FP_Q5		U(0x50)
#define CTX_FP_Q6		U(0x60)
#define CTX_FP_Q7		U(0x70)
#define CTX_FP_Q8		U(0x80)
#define CTX_FP_Q9		U(0x90)
#define CTX_FP_Q10		U(0xa0)
#define CTX_FP_Q11		U(0xb0)
#define CTX_FP_Q12		U(0xc0)
#define CTX_FP_Q13		U(0xd0)
#define CTX_FP_Q14		U(0xe0)
#define CTX_FP_Q15		U(0xf0)
#define CTX_FP_Q16		U(0x100)
#define CTX_FP_Q17		U(0x110)
#define CTX_FP_Q18		U(0x120)
#define CTX_FP_Q19		U(0x130)
#define CTX_FP_Q20		U(0x140)
#define CTX_FP_Q21		U(0x150)
#define CTX_FP_Q22		U(0x160)
#define CTX_FP_Q23		U(0x170)
#define CTX_FP_Q24		U(0x180)
#define CTX_FP_Q25		U(0x190)
#define CTX_FP_Q26		U(0x1a0)
#define CTX_FP_Q27		U(0x1b0)
#define CTX_FP_Q28		U(0x1c0)
#define CTX_FP_Q29		U(0x1d0)
#define CTX_FP_Q30		U(0x1e0)
#define CTX_FP_Q31		U(0x1f0)
#define CTX_FP_FPSR		U(0x200)
#define CTX_FP_FPCR		U(0x208)
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_FP_FPEXC32_EL2	U(0x210)
#define CTX_FPREGS_END		U(0x220) /* Align to the next 16 byte boundary */
#else
#define CTX_FPREGS_END		U(0x210) /* Align to the next 16 byte boundary */
#endif
#else
#define CTX_FPREGS_END		U(0)	/* No FP context is reserved */
#endif
/*******************************************************************************
 * Registers related to CVE-2018-3639 (Speculative Store Bypass) dynamic
 * mitigation state.
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET	(CTX_FPREGS_OFFSET + CTX_FPREGS_END)
#define CTX_CVE_2018_3639_DISABLE	U(0)
#define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */
| 297 | |
/*******************************************************************************
 * Registers related to ARMv8.3-PAuth. Each 128-bit key is stored as a
 * LO/HI pair of 64-bit values.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO		U(0x0)
#define CTX_PACIAKEY_HI		U(0x8)
#define CTX_PACIBKEY_LO		U(0x10)
#define CTX_PACIBKEY_HI		U(0x18)
#define CTX_PACDAKEY_LO		U(0x20)
#define CTX_PACDAKEY_HI		U(0x28)
#define CTX_PACDBKEY_LO		U(0x30)
#define CTX_PACDBKEY_HI		U(0x38)
#define CTX_PACGAKEY_LO		U(0x40)
#define CTX_PACGAKEY_HI		U(0x48)
#define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END	U(0)	/* No PAuth context is reserved */
#endif /* CTX_INCLUDE_PAUTH_REGS */
| 317 | |
Julius Werner | 53456fc | 2019-07-09 13:49:11 -0700 | [diff] [blame] | 318 | #ifndef __ASSEMBLER__ |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 319 | |
Dan Handley | 2bd4ef2 | 2014-04-09 13:14:54 +0100 | [diff] [blame] | 320 | #include <stdint.h> |
| 321 | |
Antonio Nino Diaz | e0f9063 | 2018-12-14 00:18:21 +0000 | [diff] [blame] | 322 | #include <lib/cassert.h> |
| 323 | |
/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
/* log2(sizeof(uint64_t)): converts a byte offset/size to a register count */
#define DWORD_SHIFT		U(3)
/*
 * Declare a 16-byte-aligned struct 'name_t' holding 'num_regs' 64-bit
 * registers; the assembler offsets above index into its ctx_regs array.
 */
#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	} __aligned(16) name##_t

/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)
#define CTX_EL1_SYSREGS_ALL	(CTX_EL1_SYSREGS_END >> DWORD_SHIFT)
#if CTX_INCLUDE_EL2_REGS
# define CTX_EL2_SYSREGS_ALL	(CTX_EL2_SYSREGS_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_FPREGS
# define CTX_FPREG_ALL		(CTX_FPREGS_END >> DWORD_SHIFT)
#endif
#define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)
#if CTX_INCLUDE_PAUTH_REGS
# define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 348 | |
| 349 | /* |
Soby Mathew | 6c5192a | 2014-04-30 15:36:37 +0100 | [diff] [blame] | 350 | * AArch64 general purpose register context structure. Usually x0-x18, |
| 351 | * lr are saved as the compiler is expected to preserve the remaining |
Achin Gupta | 07f4e07 | 2014-02-02 12:02:23 +0000 | [diff] [blame] | 352 | * callee saved registers if used by the C runtime and the assembler |
Soby Mathew | 6c5192a | 2014-04-30 15:36:37 +0100 | [diff] [blame] | 353 | * does not touch the remaining. But in case of world switch during |
| 354 | * exception handling, we need to save the callee registers too. |
Achin Gupta | 07f4e07 | 2014-02-02 12:02:23 +0000 | [diff] [blame] | 355 | */ |
Jeenu Viswambharan | caa8493 | 2014-02-06 10:36:15 +0000 | [diff] [blame] | 356 | DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL); |
Achin Gupta | 07f4e07 | 2014-02-02 12:02:23 +0000 | [diff] [blame] | 357 | |
| 358 | /* |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 359 | * AArch64 EL1 system register context structure for preserving the |
| 360 | * architectural state during world switches. |
| 361 | */ |
| 362 | DEFINE_REG_STRUCT(el1_sysregs, CTX_EL1_SYSREGS_ALL); |
| 363 | |
| 364 | |
| 365 | /* |
| 366 | * AArch64 EL2 system register context structure for preserving the |
Max Shvetsov | bdf502d | 2020-02-25 13:56:19 +0000 | [diff] [blame] | 367 | * architectural state during world switches. |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 368 | */ |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 369 | #if CTX_INCLUDE_EL2_REGS |
| 370 | DEFINE_REG_STRUCT(el2_sysregs, CTX_EL2_SYSREGS_ALL); |
| 371 | #endif |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 372 | |
| 373 | /* |
| 374 | * AArch64 floating point register context structure for preserving |
| 375 | * the floating point state during switches from one security state to |
| 376 | * another. |
| 377 | */ |
Juan Castillo | 258e94f | 2014-06-25 17:26:36 +0100 | [diff] [blame] | 378 | #if CTX_INCLUDE_FPREGS |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 379 | DEFINE_REG_STRUCT(fp_regs, CTX_FPREG_ALL); |
Juan Castillo | 258e94f | 2014-06-25 17:26:36 +0100 | [diff] [blame] | 380 | #endif |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 381 | |
| 382 | /* |
| 383 | * Miscellaneous registers used by EL3 firmware to maintain its state |
| 384 | * across exception entries and exits |
| 385 | */ |
| 386 | DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL); |
| 387 | |
Dimitris Papastamos | ba51d9e | 2018-05-16 11:36:14 +0100 | [diff] [blame] | 388 | /* Function pointer used by CVE-2018-3639 dynamic mitigation */ |
| 389 | DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL); |
| 390 | |
Antonio Nino Diaz | 594811b | 2019-01-31 11:58:00 +0000 | [diff] [blame] | 391 | /* Registers associated to ARMv8.3-PAuth */ |
| 392 | #if CTX_INCLUDE_PAUTH_REGS |
| 393 | DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL); |
| 394 | #endif |
| 395 | |
/*
 * Macros to access members of any of the above structures using their
 * byte offsets (the CTX_* constants). The offset is converted back to an
 * index into the 64-bit ctx_regs array via DWORD_SHIFT.
 */
#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (uint64_t) (val))
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 403 | |
| 404 | /* |
Zelalem Aweke | b6301e6 | 2021-07-09 17:54:30 -0500 | [diff] [blame] | 405 | * Top-level context structure which is used by EL3 firmware to preserve |
| 406 | * the state of a core at the next lower EL in a given security state and |
| 407 | * save enough EL3 meta data to be able to return to that EL and security |
| 408 | * state. The context management library will be used to ensure that |
| 409 | * SP_EL3 always points to an instance of this structure at exception |
| 410 | * entry and exit. |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 411 | */ |
Dan Handley | e2712bc | 2014-04-10 15:37:22 +0100 | [diff] [blame] | 412 | typedef struct cpu_context { |
| 413 | gp_regs_t gpregs_ctx; |
| 414 | el3_state_t el3state_ctx; |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 415 | el1_sysregs_t el1_sysregs_ctx; |
| 416 | #if CTX_INCLUDE_EL2_REGS |
| 417 | el2_sysregs_t el2_sysregs_ctx; |
| 418 | #endif |
Juan Castillo | 258e94f | 2014-06-25 17:26:36 +0100 | [diff] [blame] | 419 | #if CTX_INCLUDE_FPREGS |
Dan Handley | e2712bc | 2014-04-10 15:37:22 +0100 | [diff] [blame] | 420 | fp_regs_t fpregs_ctx; |
Juan Castillo | 258e94f | 2014-06-25 17:26:36 +0100 | [diff] [blame] | 421 | #endif |
Dimitris Papastamos | ba51d9e | 2018-05-16 11:36:14 +0100 | [diff] [blame] | 422 | cve_2018_3639_t cve_2018_3639_ctx; |
Antonio Nino Diaz | 594811b | 2019-01-31 11:58:00 +0000 | [diff] [blame] | 423 | #if CTX_INCLUDE_PAUTH_REGS |
| 424 | pauth_t pauth_ctx; |
| 425 | #endif |
Dan Handley | e2712bc | 2014-04-10 15:37:22 +0100 | [diff] [blame] | 426 | } cpu_context_t; |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 427 | |
/*
 * Macros to access members of the 'cpu_context_t' structure given an
 * opaque context handle 'h'. The handle is parenthesised before the cast
 * so that expression arguments (e.g. 'base + idx') expand correctly.
 */
#define get_el3state_ctx(h)	(&((cpu_context_t *) (h))->el3state_ctx)
#if CTX_INCLUDE_FPREGS
# define get_fpregs_ctx(h)	(&((cpu_context_t *) (h))->fpregs_ctx)
#endif
#define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) (h))->el1_sysregs_ctx)
#if CTX_INCLUDE_EL2_REGS
# define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) (h))->el2_sysregs_ctx)
#endif
#define get_gpregs_ctx(h)	(&((cpu_context_t *) (h))->gpregs_ctx)
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) (h))->cve_2018_3639_ctx)
#if CTX_INCLUDE_PAUTH_REGS
# define get_pauth_ctx(h)	(&((cpu_context_t *) (h))->pauth_ctx)
#endif
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 442 | |
| 443 | /* |
| 444 | * Compile time assertions related to the 'cpu_context' structure to |
| 445 | * ensure that the assembler and the compiler view of the offsets of |
| 446 | * the structure members is the same. |
| 447 | */ |
Dan Handley | e2712bc | 2014-04-10 15:37:22 +0100 | [diff] [blame] | 448 | CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx), \ |
Achin Gupta | 07f4e07 | 2014-02-02 12:02:23 +0000 | [diff] [blame] | 449 | assert_core_context_gp_offset_mismatch); |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 450 | CASSERT(CTX_EL1_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el1_sysregs_ctx), \ |
| 451 | assert_core_context_el1_sys_offset_mismatch); |
| 452 | #if CTX_INCLUDE_EL2_REGS |
| 453 | CASSERT(CTX_EL2_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el2_sysregs_ctx), \ |
| 454 | assert_core_context_el2_sys_offset_mismatch); |
| 455 | #endif |
Juan Castillo | 258e94f | 2014-06-25 17:26:36 +0100 | [diff] [blame] | 456 | #if CTX_INCLUDE_FPREGS |
Dan Handley | e2712bc | 2014-04-10 15:37:22 +0100 | [diff] [blame] | 457 | CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx), \ |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 458 | assert_core_context_fp_offset_mismatch); |
Juan Castillo | 258e94f | 2014-06-25 17:26:36 +0100 | [diff] [blame] | 459 | #endif |
Dan Handley | e2712bc | 2014-04-10 15:37:22 +0100 | [diff] [blame] | 460 | CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx), \ |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 461 | assert_core_context_el3state_offset_mismatch); |
Dimitris Papastamos | ba51d9e | 2018-05-16 11:36:14 +0100 | [diff] [blame] | 462 | CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx), \ |
| 463 | assert_core_context_cve_2018_3639_offset_mismatch); |
Antonio Nino Diaz | 594811b | 2019-01-31 11:58:00 +0000 | [diff] [blame] | 464 | #if CTX_INCLUDE_PAUTH_REGS |
| 465 | CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx), \ |
| 466 | assert_core_context_pauth_offset_mismatch); |
| 467 | #endif |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 468 | |
/*
 * Helper macros to set the general purpose registers that correspond to
 * parameters in an aapcs_64 call i.e. x0-x7. set_aapcs_argsN sets x0..xN
 * by chaining down to set_aapcs_args0.
 */
#define set_aapcs_args0(ctx, x0)		do {			\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
	} while (0)
#define set_aapcs_args1(ctx, x0, x1)		do {			\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
		set_aapcs_args0(ctx, x0);				\
	} while (0)
#define set_aapcs_args2(ctx, x0, x1, x2)	do {			\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
		set_aapcs_args1(ctx, x0, x1);				\
	} while (0)
#define set_aapcs_args3(ctx, x0, x1, x2, x3)	do {			\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
		set_aapcs_args2(ctx, x0, x1, x2);			\
	} while (0)
#define set_aapcs_args4(ctx, x0, x1, x2, x3, x4)	do {		\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
	} while (0)
#define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5)	do {		\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
	} while (0)
#define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
	} while (0)
#define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7)	do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
	} while (0)
Achin Gupta | 607084e | 2014-02-09 18:24:19 +0000 | [diff] [blame] | 504 | |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 505 | /******************************************************************************* |
| 506 | * Function prototypes |
| 507 | ******************************************************************************/ |
Max Shvetsov | c9e2c92 | 2020-02-17 16:15:47 +0000 | [diff] [blame] | 508 | void el1_sysregs_context_save(el1_sysregs_t *regs); |
| 509 | void el1_sysregs_context_restore(el1_sysregs_t *regs); |
Max Shvetsov | bdf502d | 2020-02-25 13:56:19 +0000 | [diff] [blame] | 510 | |
| 511 | #if CTX_INCLUDE_EL2_REGS |
Zelalem Aweke | 5362beb | 2022-04-04 17:42:48 -0500 | [diff] [blame] | 512 | void el2_sysregs_context_save_common(el2_sysregs_t *regs); |
| 513 | void el2_sysregs_context_restore_common(el2_sysregs_t *regs); |
| 514 | #if ENABLE_SPE_FOR_LOWER_ELS |
| 515 | void el2_sysregs_context_save_spe(el2_sysregs_t *regs); |
| 516 | void el2_sysregs_context_restore_spe(el2_sysregs_t *regs); |
| 517 | #endif /* ENABLE_SPE_FOR_LOWER_ELS */ |
| 518 | #if CTX_INCLUDE_MTE_REGS |
| 519 | void el2_sysregs_context_save_mte(el2_sysregs_t *regs); |
| 520 | void el2_sysregs_context_restore_mte(el2_sysregs_t *regs); |
| 521 | #endif /* CTX_INCLUDE_MTE_REGS */ |
| 522 | #if ENABLE_MPAM_FOR_LOWER_ELS |
| 523 | void el2_sysregs_context_save_mpam(el2_sysregs_t *regs); |
| 524 | void el2_sysregs_context_restore_mpam(el2_sysregs_t *regs); |
| 525 | #endif /* ENABLE_MPAM_FOR_LOWER_ELS */ |
Zelalem Aweke | 5362beb | 2022-04-04 17:42:48 -0500 | [diff] [blame] | 526 | #if ENABLE_FEAT_ECV |
| 527 | void el2_sysregs_context_save_ecv(el2_sysregs_t *regs); |
| 528 | void el2_sysregs_context_restore_ecv(el2_sysregs_t *regs); |
| 529 | #endif /* ENABLE_FEAT_ECV */ |
| 530 | #if ENABLE_FEAT_VHE |
| 531 | void el2_sysregs_context_save_vhe(el2_sysregs_t *regs); |
| 532 | void el2_sysregs_context_restore_vhe(el2_sysregs_t *regs); |
| 533 | #endif /* ENABLE_FEAT_VHE */ |
| 534 | #if RAS_EXTENSION |
| 535 | void el2_sysregs_context_save_ras(el2_sysregs_t *regs); |
| 536 | void el2_sysregs_context_restore_ras(el2_sysregs_t *regs); |
| 537 | #endif /* RAS_EXTENSION */ |
| 538 | #if CTX_INCLUDE_NEVE_REGS |
| 539 | void el2_sysregs_context_save_nv2(el2_sysregs_t *regs); |
| 540 | void el2_sysregs_context_restore_nv2(el2_sysregs_t *regs); |
| 541 | #endif /* CTX_INCLUDE_NEVE_REGS */ |
Zelalem Aweke | 5362beb | 2022-04-04 17:42:48 -0500 | [diff] [blame] | 542 | #if ENABLE_FEAT_CSV2_2 |
| 543 | void el2_sysregs_context_save_csv2(el2_sysregs_t *regs); |
| 544 | void el2_sysregs_context_restore_csv2(el2_sysregs_t *regs); |
| 545 | #endif /* ENABLE_FEAT_CSV2_2 */ |
Zelalem Aweke | 5362beb | 2022-04-04 17:42:48 -0500 | [diff] [blame] | 546 | #endif /* CTX_INCLUDE_EL2_REGS */ |
Max Shvetsov | bdf502d | 2020-02-25 13:56:19 +0000 | [diff] [blame] | 547 | |
Juan Castillo | 258e94f | 2014-06-25 17:26:36 +0100 | [diff] [blame] | 548 | #if CTX_INCLUDE_FPREGS |
Dan Handley | e2712bc | 2014-04-10 15:37:22 +0100 | [diff] [blame] | 549 | void fpregs_context_save(fp_regs_t *regs); |
| 550 | void fpregs_context_restore(fp_regs_t *regs); |
Juan Castillo | 258e94f | 2014-06-25 17:26:36 +0100 | [diff] [blame] | 551 | #endif |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 552 | |
Julius Werner | 53456fc | 2019-07-09 13:49:11 -0700 | [diff] [blame] | 553 | #endif /* __ASSEMBLER__ */ |
Achin Gupta | 9ac63c5 | 2014-01-16 12:08:03 +0000 | [diff] [blame] | 554 | |
Antonio Nino Diaz | 864ca6f | 2018-10-31 15:25:35 +0000 | [diff] [blame] | 555 | #endif /* CONTEXT_H */ |