/*
 * Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CONTEXT_H
#define CONTEXT_H

#include <lib/utils_def.h>

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_GPREGS_OFFSET	U(0x0)
#define CTX_GPREG_X0		U(0x0)
#define CTX_GPREG_X1		U(0x8)
#define CTX_GPREG_X2		U(0x10)
#define CTX_GPREG_X3		U(0x18)
#define CTX_GPREG_X4		U(0x20)
#define CTX_GPREG_X5		U(0x28)
#define CTX_GPREG_X6		U(0x30)
#define CTX_GPREG_X7		U(0x38)
#define CTX_GPREG_X8		U(0x40)
#define CTX_GPREG_X9		U(0x48)
#define CTX_GPREG_X10		U(0x50)
#define CTX_GPREG_X11		U(0x58)
#define CTX_GPREG_X12		U(0x60)
#define CTX_GPREG_X13		U(0x68)
#define CTX_GPREG_X14		U(0x70)
#define CTX_GPREG_X15		U(0x78)
#define CTX_GPREG_X16		U(0x80)
#define CTX_GPREG_X17		U(0x88)
#define CTX_GPREG_X18		U(0x90)
#define CTX_GPREG_X19		U(0x98)
#define CTX_GPREG_X20		U(0xa0)
#define CTX_GPREG_X21		U(0xa8)
#define CTX_GPREG_X22		U(0xb0)
#define CTX_GPREG_X23		U(0xb8)
#define CTX_GPREG_X24		U(0xc0)
#define CTX_GPREG_X25		U(0xc8)
#define CTX_GPREG_X26		U(0xd0)
#define CTX_GPREG_X27		U(0xd8)
#define CTX_GPREG_X28		U(0xe0)
#define CTX_GPREG_X29		U(0xe8)
#define CTX_GPREG_LR		U(0xf0)
#define CTX_GPREG_SP_EL0	U(0xf8)
#define CTX_GPREGS_END		U(0x100)
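
/*
 * Illustrative only: assembler code is expected to combine these offsets
 * with CTX_GPREGS_OFFSET when the stack pointer refers to a 'cpu_context'
 * instance. A sketch (not taken verbatim from the save/restore code):
 *
 *	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
 *	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
 */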

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are only
 * 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3		U(0x0)
#define CTX_ESR_EL3		U(0x8)
#define CTX_RUNTIME_SP		U(0x10)
#define CTX_SPSR_EL3		U(0x18)
#define CTX_ELR_EL3		U(0x20)
#define CTX_PMCR_EL0		U(0x28)
#define CTX_IS_IN_EL3		U(0x30)
#define CTX_CPTR_EL3		U(0x38)
#define CTX_ZCR_EL3		U(0x40)
#define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Constants that allow assembler code to access members of the
 * 'el1_sys_regs' structure at their correct offsets. Note that some of the
 * registers are only 32 bits wide but are stored as 64-bit values for
 * convenience.
 ******************************************************************************/
#define CTX_EL1_SYSREGS_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_SPSR_EL1		U(0x0)
#define CTX_ELR_EL1		U(0x8)
#define CTX_SCTLR_EL1		U(0x10)
#define CTX_TCR_EL1		U(0x18)
#define CTX_CPACR_EL1		U(0x20)
#define CTX_CSSELR_EL1		U(0x28)
#define CTX_SP_EL1		U(0x30)
#define CTX_ESR_EL1		U(0x38)
#define CTX_TTBR0_EL1		U(0x40)
#define CTX_TTBR1_EL1		U(0x48)
#define CTX_MAIR_EL1		U(0x50)
#define CTX_AMAIR_EL1		U(0x58)
#define CTX_ACTLR_EL1		U(0x60)
#define CTX_TPIDR_EL1		U(0x68)
#define CTX_TPIDR_EL0		U(0x70)
#define CTX_TPIDRRO_EL0		U(0x78)
#define CTX_PAR_EL1		U(0x80)
#define CTX_FAR_EL1		U(0x88)
#define CTX_AFSR0_EL1		U(0x90)
#define CTX_AFSR1_EL1		U(0x98)
#define CTX_CONTEXTIDR_EL1	U(0xa0)
#define CTX_VBAR_EL1		U(0xa8)

/*
 * If the platform is AArch64-only, there is no need to save and restore these
 * AArch32 registers.
 */
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_SPSR_ABT		U(0xb0) /* Align to the next 16 byte boundary */
#define CTX_SPSR_UND		U(0xb8)
#define CTX_SPSR_IRQ		U(0xc0)
#define CTX_SPSR_FIQ		U(0xc8)
#define CTX_DACR32_EL2		U(0xd0)
#define CTX_IFSR32_EL2		U(0xd8)
#define CTX_AARCH32_END		U(0xe0) /* Align to the next 16 byte boundary */
#else
#define CTX_AARCH32_END		U(0xb0) /* Align to the next 16 byte boundary */
#endif /* CTX_INCLUDE_AARCH32_REGS */

/*
 * If the timer registers aren't saved and restored, we don't have to reserve
 * space for them in the context
 */
#if NS_TIMER_SWITCH
#define CTX_CNTP_CTL_EL0	(CTX_AARCH32_END + U(0x0))
#define CTX_CNTP_CVAL_EL0	(CTX_AARCH32_END + U(0x8))
#define CTX_CNTV_CTL_EL0	(CTX_AARCH32_END + U(0x10))
#define CTX_CNTV_CVAL_EL0	(CTX_AARCH32_END + U(0x18))
#define CTX_CNTKCTL_EL1		(CTX_AARCH32_END + U(0x20))
#define CTX_TIMER_SYSREGS_END	(CTX_AARCH32_END + U(0x30)) /* Align to the next 16 byte boundary */
#else
#define CTX_TIMER_SYSREGS_END	CTX_AARCH32_END
#endif /* NS_TIMER_SWITCH */

#if CTX_INCLUDE_MTE_REGS
#define CTX_TFSRE0_EL1		(CTX_TIMER_SYSREGS_END + U(0x0))
#define CTX_TFSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x8))
#define CTX_RGSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x10))
#define CTX_GCR_EL1		(CTX_TIMER_SYSREGS_END + U(0x18))

/* Align to the next 16 byte boundary */
#define CTX_MTE_REGS_END	(CTX_TIMER_SYSREGS_END + U(0x20))
#else
#define CTX_MTE_REGS_END	CTX_TIMER_SYSREGS_END
#endif /* CTX_INCLUDE_MTE_REGS */

/*
 * End of system registers.
 */
#define CTX_EL1_SYSREGS_END	CTX_MTE_REGS_END

/*
 * EL2 register set
 */

#if CTX_INCLUDE_EL2_REGS
/* For later discussion
 * ICH_AP0R<n>_EL2
 * ICH_AP1R<n>_EL2
 * AMEVCNTVOFF0<n>_EL2
 * AMEVCNTVOFF1<n>_EL2
 * ICH_LR<n>_EL2
 */
#define CTX_EL2_SYSREGS_OFFSET	(CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)

#define CTX_ACTLR_EL2		U(0x0)
#define CTX_AFSR0_EL2		U(0x8)
#define CTX_AFSR1_EL2		U(0x10)
#define CTX_AMAIR_EL2		U(0x18)
#define CTX_CNTHCTL_EL2		U(0x20)
#define CTX_CNTVOFF_EL2		U(0x28)
#define CTX_CPTR_EL2		U(0x30)
#define CTX_DBGVCR32_EL2	U(0x38)
#define CTX_ELR_EL2		U(0x40)
#define CTX_ESR_EL2		U(0x48)
#define CTX_FAR_EL2		U(0x50)
#define CTX_HACR_EL2		U(0x58)
#define CTX_HCR_EL2		U(0x60)
#define CTX_HPFAR_EL2		U(0x68)
#define CTX_HSTR_EL2		U(0x70)
#define CTX_ICC_SRE_EL2		U(0x78)
#define CTX_ICH_HCR_EL2		U(0x80)
#define CTX_ICH_VMCR_EL2	U(0x88)
#define CTX_MAIR_EL2		U(0x90)
#define CTX_MDCR_EL2		U(0x98)
#define CTX_PMSCR_EL2		U(0xa0)
#define CTX_SCTLR_EL2		U(0xa8)
#define CTX_SPSR_EL2		U(0xb0)
#define CTX_SP_EL2		U(0xb8)
#define CTX_TCR_EL2		U(0xc0)
#define CTX_TPIDR_EL2		U(0xc8)
#define CTX_TTBR0_EL2		U(0xd0)
#define CTX_VBAR_EL2		U(0xd8)
#define CTX_VMPIDR_EL2		U(0xe0)
#define CTX_VPIDR_EL2		U(0xe8)
#define CTX_VTCR_EL2		U(0xf0)
#define CTX_VTTBR_EL2		U(0xf8)

// Only if MTE registers in use
#define CTX_TFSR_EL2		U(0x100)

// Only if ENABLE_MPAM_FOR_LOWER_ELS==1
#define CTX_MPAM2_EL2		U(0x108)
#define CTX_MPAMHCR_EL2		U(0x110)
#define CTX_MPAMVPM0_EL2	U(0x118)
#define CTX_MPAMVPM1_EL2	U(0x120)
#define CTX_MPAMVPM2_EL2	U(0x128)
#define CTX_MPAMVPM3_EL2	U(0x130)
#define CTX_MPAMVPM4_EL2	U(0x138)
#define CTX_MPAMVPM5_EL2	U(0x140)
#define CTX_MPAMVPM6_EL2	U(0x148)
#define CTX_MPAMVPM7_EL2	U(0x150)
#define CTX_MPAMVPMV_EL2	U(0x158)

// Starting with Armv8.6
#define CTX_HDFGRTR_EL2		U(0x160)
#define CTX_HAFGRTR_EL2		U(0x168)
#define CTX_HDFGWTR_EL2		U(0x170)
#define CTX_HFGITR_EL2		U(0x178)
#define CTX_HFGRTR_EL2		U(0x180)
#define CTX_HFGWTR_EL2		U(0x188)
#define CTX_CNTPOFF_EL2		U(0x190)

// Starting with Armv8.4
#define CTX_CONTEXTIDR_EL2	U(0x198)
#define CTX_SDER32_EL2		U(0x1a0)
#define CTX_TTBR1_EL2		U(0x1a8)
#define CTX_VDISR_EL2		U(0x1b0)
#define CTX_VNCR_EL2		U(0x1b8)
#define CTX_VSESR_EL2		U(0x1c0)
#define CTX_VSTCR_EL2		U(0x1c8)
#define CTX_VSTTBR_EL2		U(0x1d0)
#define CTX_TRFCR_EL2		U(0x1d8)

// Starting with Armv8.5
#define CTX_SCXTNUM_EL2		U(0x1e0)

// Register for FEAT_HCX
#define CTX_HCRX_EL2		U(0x1e8)

/* Align to the next 16 byte boundary */
#define CTX_EL2_SYSREGS_END	U(0x1f0)

#endif /* CTX_INCLUDE_EL2_REGS */

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'fp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#if CTX_INCLUDE_EL2_REGS
# define CTX_FPREGS_OFFSET	(CTX_EL2_SYSREGS_OFFSET + CTX_EL2_SYSREGS_END)
#else
# define CTX_FPREGS_OFFSET	(CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
#endif
#if CTX_INCLUDE_FPREGS
#define CTX_FP_Q0		U(0x0)
#define CTX_FP_Q1		U(0x10)
#define CTX_FP_Q2		U(0x20)
#define CTX_FP_Q3		U(0x30)
#define CTX_FP_Q4		U(0x40)
#define CTX_FP_Q5		U(0x50)
#define CTX_FP_Q6		U(0x60)
#define CTX_FP_Q7		U(0x70)
#define CTX_FP_Q8		U(0x80)
#define CTX_FP_Q9		U(0x90)
#define CTX_FP_Q10		U(0xa0)
#define CTX_FP_Q11		U(0xb0)
#define CTX_FP_Q12		U(0xc0)
#define CTX_FP_Q13		U(0xd0)
#define CTX_FP_Q14		U(0xe0)
#define CTX_FP_Q15		U(0xf0)
#define CTX_FP_Q16		U(0x100)
#define CTX_FP_Q17		U(0x110)
#define CTX_FP_Q18		U(0x120)
#define CTX_FP_Q19		U(0x130)
#define CTX_FP_Q20		U(0x140)
#define CTX_FP_Q21		U(0x150)
#define CTX_FP_Q22		U(0x160)
#define CTX_FP_Q23		U(0x170)
#define CTX_FP_Q24		U(0x180)
#define CTX_FP_Q25		U(0x190)
#define CTX_FP_Q26		U(0x1a0)
#define CTX_FP_Q27		U(0x1b0)
#define CTX_FP_Q28		U(0x1c0)
#define CTX_FP_Q29		U(0x1d0)
#define CTX_FP_Q30		U(0x1e0)
#define CTX_FP_Q31		U(0x1f0)
#define CTX_FP_FPSR		U(0x200)
#define CTX_FP_FPCR		U(0x208)
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_FP_FPEXC32_EL2	U(0x210)
#define CTX_FPREGS_END		U(0x220) /* Align to the next 16 byte boundary */
#else
#define CTX_FPREGS_END		U(0x210) /* Align to the next 16 byte boundary */
#endif
#else
#define CTX_FPREGS_END		U(0)
#endif

/*******************************************************************************
 * Registers related to CVE-2018-3639
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET	(CTX_FPREGS_OFFSET + CTX_FPREGS_END)
#define CTX_CVE_2018_3639_DISABLE	U(0)
#define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Registers related to ARMv8.3-PAuth.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO		U(0x0)
#define CTX_PACIAKEY_HI		U(0x8)
#define CTX_PACIBKEY_LO		U(0x10)
#define CTX_PACIBKEY_HI		U(0x18)
#define CTX_PACDAKEY_LO		U(0x20)
#define CTX_PACDAKEY_HI		U(0x28)
#define CTX_PACDBKEY_LO		U(0x30)
#define CTX_PACDBKEY_HI		U(0x38)
#define CTX_PACGAKEY_LO		U(0x40)
#define CTX_PACGAKEY_HI		U(0x48)
#define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END	U(0)
#endif /* CTX_INCLUDE_PAUTH_REGS */

#ifndef __ASSEMBLER__

#include <stdint.h>

#include <lib/cassert.h>

/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
#define DWORD_SHIFT		U(3)
#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	} __aligned(16) name##_t
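
/*
 * For illustration, DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL) below expands
 * to roughly:
 *
 *	typedef struct gp_regs {
 *		uint64_t ctx_regs[CTX_GPREG_ALL];
 *	} __aligned(16) gp_regs_t;
 */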

/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)
#define CTX_EL1_SYSREGS_ALL	(CTX_EL1_SYSREGS_END >> DWORD_SHIFT)
#if CTX_INCLUDE_EL2_REGS
# define CTX_EL2_SYSREGS_ALL	(CTX_EL2_SYSREGS_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_FPREGS
# define CTX_FPREG_ALL		(CTX_FPREGS_END >> DWORD_SHIFT)
#endif
#define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)
#if CTX_INCLUDE_PAUTH_REGS
# define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif

/*
 * AArch64 general purpose register context structure. Usually only x0-x18
 * and lr are saved, as the compiler is expected to preserve the remaining
 * callee-saved registers if used by the C runtime, and the assembler does
 * not touch them. However, in the case of a world switch during exception
 * handling, the callee-saved registers need to be saved as well.
 */
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);

/*
 * AArch64 EL1 system register context structure for preserving the
 * architectural state during world switches.
 */
DEFINE_REG_STRUCT(el1_sysregs, CTX_EL1_SYSREGS_ALL);

/*
 * AArch64 EL2 system register context structure for preserving the
 * architectural state during world switches.
 */
#if CTX_INCLUDE_EL2_REGS
DEFINE_REG_STRUCT(el2_sysregs, CTX_EL2_SYSREGS_ALL);
#endif

/*
 * AArch64 floating point register context structure for preserving
 * the floating point state during switches from one security state to
 * another.
 */
#if CTX_INCLUDE_FPREGS
DEFINE_REG_STRUCT(fp_regs, CTX_FPREG_ALL);
#endif

/*
 * Miscellaneous registers used by EL3 firmware to maintain its state
 * across exception entries and exits
 */
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);

/* Registers associated with ARMv8.3-PAuth */
#if CTX_INCLUDE_PAUTH_REGS
DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
#endif

/*
 * Macros to access members of any of the above structures using their
 * offsets
 */
#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (uint64_t) (val))
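
/*
 * Illustrative only: given a pointer 'state' to an 'el3_state_t' instance,
 * a saved register can be read and updated as follows (SCR_EL3 is used
 * purely as an example, and 'modified_bits' is a placeholder supplied by
 * the caller):
 *
 *	uint64_t scr_el3 = read_ctx_reg(state, CTX_SCR_EL3);
 *	write_ctx_reg(state, CTX_SCR_EL3, scr_el3 | modified_bits);
 */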

/*
 * Top-level context structure which is used by EL3 firmware to preserve
 * the state of a core at the next lower EL in a given security state and
 * save enough EL3 metadata to be able to return to that EL and security
 * state. The context management library ensures that SP_EL3 always points
 * to an instance of this structure at exception entry and exit.
 */
typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;
	el1_sysregs_t el1_sysregs_ctx;
#if CTX_INCLUDE_EL2_REGS
	el2_sysregs_t el2_sysregs_ctx;
#endif
#if CTX_INCLUDE_FPREGS
	fp_regs_t fpregs_ctx;
#endif
	cve_2018_3639_t cve_2018_3639_ctx;
#if CTX_INCLUDE_PAUTH_REGS
	pauth_t pauth_ctx;
#endif
} cpu_context_t;

/* Macros to access members of the 'cpu_context_t' structure */
#define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)
#if CTX_INCLUDE_FPREGS
# define get_fpregs_ctx(h)	(&((cpu_context_t *) h)->fpregs_ctx)
#endif
#define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
#if CTX_INCLUDE_EL2_REGS
# define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
#endif
#define get_gpregs_ctx(h)	(&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)
#if CTX_INCLUDE_PAUTH_REGS
# define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
#endif
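
/*
 * Illustrative only: given a 'cpu_context_t *ctx', the accessor macros above
 * combine with read_ctx_reg()/write_ctx_reg() to reach an individual saved
 * register, e.g. updating the saved return address held in the EL3 state
 * ('new_entrypoint' is a placeholder supplied by the caller):
 *
 *	write_ctx_reg(get_el3state_ctx(ctx), CTX_ELR_EL3, new_entrypoint);
 *	uint64_t spsr = read_ctx_reg(get_el3state_ctx(ctx), CTX_SPSR_EL3);
 */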

/*
 * Compile time assertions related to the 'cpu_context' structure to
 * ensure that the assembler's and the compiler's views of the offsets of
 * the structure members are the same.
 */
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx), \
	assert_core_context_gp_offset_mismatch);
CASSERT(CTX_EL1_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el1_sysregs_ctx), \
	assert_core_context_el1_sys_offset_mismatch);
#if CTX_INCLUDE_EL2_REGS
CASSERT(CTX_EL2_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el2_sysregs_ctx), \
	assert_core_context_el2_sys_offset_mismatch);
#endif
#if CTX_INCLUDE_FPREGS
CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx), \
	assert_core_context_fp_offset_mismatch);
#endif
CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx), \
	assert_core_context_el3state_offset_mismatch);
CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx), \
	assert_core_context_cve_2018_3639_offset_mismatch);
#if CTX_INCLUDE_PAUTH_REGS
CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx), \
	assert_core_context_pauth_offset_mismatch);
#endif

/*
 * Helper macros to set the general purpose registers that correspond to
 * parameters in an aapcs_64 call, i.e. x0-x7.
 */
#define set_aapcs_args0(ctx, x0) do {					\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
	} while (0)
#define set_aapcs_args1(ctx, x0, x1) do {				\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
		set_aapcs_args0(ctx, x0);				\
	} while (0)
#define set_aapcs_args2(ctx, x0, x1, x2) do {				\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
		set_aapcs_args1(ctx, x0, x1);				\
	} while (0)
#define set_aapcs_args3(ctx, x0, x1, x2, x3) do {			\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
		set_aapcs_args2(ctx, x0, x1, x2);			\
	} while (0)
#define set_aapcs_args4(ctx, x0, x1, x2, x3, x4) do {			\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
	} while (0)
#define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5) do {		\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
	} while (0)
#define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6) do {		\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
	} while (0)
#define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7) do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
	} while (0)
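
/*
 * Illustrative only: before returning to a lower EL, a service might place
 * a return code and two result values in the saved x0-x2 of that EL's
 * context (the argument names below are placeholders):
 *
 *	set_aapcs_args2(ctx, smc_ret_code, result0, result1);
 */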

/*******************************************************************************
 * Function prototypes
 ******************************************************************************/
void el1_sysregs_context_save(el1_sysregs_t *regs);
void el1_sysregs_context_restore(el1_sysregs_t *regs);

#if CTX_INCLUDE_EL2_REGS
void el2_sysregs_context_save(el2_sysregs_t *regs);
void el2_sysregs_context_restore(el2_sysregs_t *regs);
#endif

#if CTX_INCLUDE_FPREGS
void fpregs_context_save(fp_regs_t *regs);
void fpregs_context_restore(fp_regs_t *regs);
#endif
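
/*
 * A sketch of how callers are expected to pair these routines during a world
 * switch, assuming a context management layer that provides cm_get_context()
 * and the SECURE/NON_SECURE security state identifiers:
 *
 *	el1_sysregs_context_save(get_el1_sysregs_ctx(cm_get_context(SECURE)));
 *	el1_sysregs_context_restore(get_el1_sysregs_ctx(cm_get_context(NON_SECURE)));
 */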

#endif /* __ASSEMBLER__ */

#endif /* CONTEXT_H */