/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef CONTEXT_H
#define CONTEXT_H

#include <lib/el3_runtime/context_el2.h>
#include <lib/el3_runtime/cpu_data.h>
#include <lib/utils_def.h>

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'gp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
#define CTX_GPREGS_OFFSET	U(0x0)
#define CTX_GPREG_X0		U(0x0)
#define CTX_GPREG_X1		U(0x8)
#define CTX_GPREG_X2		U(0x10)
#define CTX_GPREG_X3		U(0x18)
#define CTX_GPREG_X4		U(0x20)
#define CTX_GPREG_X5		U(0x28)
#define CTX_GPREG_X6		U(0x30)
#define CTX_GPREG_X7		U(0x38)
#define CTX_GPREG_X8		U(0x40)
#define CTX_GPREG_X9		U(0x48)
#define CTX_GPREG_X10		U(0x50)
#define CTX_GPREG_X11		U(0x58)
#define CTX_GPREG_X12		U(0x60)
#define CTX_GPREG_X13		U(0x68)
#define CTX_GPREG_X14		U(0x70)
#define CTX_GPREG_X15		U(0x78)
#define CTX_GPREG_X16		U(0x80)
#define CTX_GPREG_X17		U(0x88)
#define CTX_GPREG_X18		U(0x90)
#define CTX_GPREG_X19		U(0x98)
#define CTX_GPREG_X20		U(0xa0)
#define CTX_GPREG_X21		U(0xa8)
#define CTX_GPREG_X22		U(0xb0)
#define CTX_GPREG_X23		U(0xb8)
#define CTX_GPREG_X24		U(0xc0)
#define CTX_GPREG_X25		U(0xc8)
#define CTX_GPREG_X26		U(0xd0)
#define CTX_GPREG_X27		U(0xd8)
#define CTX_GPREG_X28		U(0xe0)
#define CTX_GPREG_X29		U(0xe8)
#define CTX_GPREG_LR		U(0xf0)
#define CTX_GPREG_SP_EL0	U(0xf8)
#define CTX_GPREGS_END		U(0x100)
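/*
 * Illustrative sketch (not part of the original header): assembler code in the
 * EL3 entry/exit paths typically uses these constants as immediate offsets
 * from SP_EL3, which points at the 'cpu_context' of the current core, e.g.
 *
 *	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
 *	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 *
 * The actual save/restore sequences are assumed to live in the EL3 runtime
 * assembly (e.g. context.S); the snippet only shows how the offsets are
 * intended to be consumed.
 */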

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el3_state'
 * structure at their correct offsets. Note that some of the registers are only
 * 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL3STATE_OFFSET	(CTX_GPREGS_OFFSET + CTX_GPREGS_END)
#define CTX_SCR_EL3		U(0x0)
#define CTX_ESR_EL3		U(0x8)
#define CTX_RUNTIME_SP		U(0x10)
#define CTX_SPSR_EL3		U(0x18)
#define CTX_ELR_EL3		U(0x20)
#define CTX_PMCR_EL0		U(0x28)
#define CTX_IS_IN_EL3		U(0x30)
#define CTX_MDCR_EL3		U(0x38)
/* Constants required to support nested exceptions in EL3 */
#define CTX_SAVED_ELR_EL3	U(0x40)
/*
 * General purpose flag used to save various EL3 states:
 * FFH mode : indicates whether a nested exception is being handled
 * KFH mode : used as a counter value
 */
#define CTX_NESTED_EA_FLAG	U(0x48)
#if FFH_SUPPORT
#define CTX_SAVED_ESR_EL3	U(0x50)
#define CTX_SAVED_SPSR_EL3	U(0x58)
#define CTX_SAVED_GPREG_LR	U(0x60)
#define CTX_EL3STATE_END	U(0x70) /* Align to the next 16 byte boundary */
#else
#define CTX_EL3STATE_END	U(0x50) /* Align to the next 16 byte boundary */
#endif /* FFH_SUPPORT */

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'el1_sys_regs'
 * structure at their correct offsets. Note that some of the registers are only
 * 32 bits wide but are stored as 64-bit values for convenience.
 ******************************************************************************/
#define CTX_EL1_SYSREGS_OFFSET	(CTX_EL3STATE_OFFSET + CTX_EL3STATE_END)
#define CTX_SPSR_EL1		U(0x0)
#define CTX_ELR_EL1		U(0x8)
#define CTX_SCTLR_EL1		U(0x10)
#define CTX_TCR_EL1		U(0x18)
#define CTX_CPACR_EL1		U(0x20)
#define CTX_CSSELR_EL1		U(0x28)
#define CTX_SP_EL1		U(0x30)
#define CTX_ESR_EL1		U(0x38)
#define CTX_TTBR0_EL1		U(0x40)
#define CTX_TTBR1_EL1		U(0x48)
#define CTX_MAIR_EL1		U(0x50)
#define CTX_AMAIR_EL1		U(0x58)
#define CTX_ACTLR_EL1		U(0x60)
#define CTX_TPIDR_EL1		U(0x68)
#define CTX_TPIDR_EL0		U(0x70)
#define CTX_TPIDRRO_EL0		U(0x78)
#define CTX_PAR_EL1		U(0x80)
#define CTX_FAR_EL1		U(0x88)
#define CTX_AFSR0_EL1		U(0x90)
#define CTX_AFSR1_EL1		U(0x98)
#define CTX_CONTEXTIDR_EL1	U(0xa0)
#define CTX_VBAR_EL1		U(0xa8)
#define CTX_MDCCINT_EL1		U(0xb0)
#define CTX_MDSCR_EL1		U(0xb8)

#define CTX_AARCH64_END		U(0xc0) /* Align to the next 16 byte boundary */

/*
 * If the platform is AArch64-only, there is no need to save and restore these
 * AArch32 registers.
 */
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_SPSR_ABT		(CTX_AARCH64_END + U(0x0))
#define CTX_SPSR_UND		(CTX_AARCH64_END + U(0x8))
#define CTX_SPSR_IRQ		(CTX_AARCH64_END + U(0x10))
#define CTX_SPSR_FIQ		(CTX_AARCH64_END + U(0x18))
#define CTX_DACR32_EL2		(CTX_AARCH64_END + U(0x20))
#define CTX_IFSR32_EL2		(CTX_AARCH64_END + U(0x28))
#define CTX_AARCH32_END		(CTX_AARCH64_END + U(0x30)) /* Align to the next 16 byte boundary */
#else
#define CTX_AARCH32_END		CTX_AARCH64_END
#endif /* CTX_INCLUDE_AARCH32_REGS */

/*
 * If the timer registers aren't saved and restored, we don't have to reserve
 * space for them in the context.
 */
#if NS_TIMER_SWITCH
#define CTX_CNTP_CTL_EL0	(CTX_AARCH32_END + U(0x0))
#define CTX_CNTP_CVAL_EL0	(CTX_AARCH32_END + U(0x8))
#define CTX_CNTV_CTL_EL0	(CTX_AARCH32_END + U(0x10))
#define CTX_CNTV_CVAL_EL0	(CTX_AARCH32_END + U(0x18))
#define CTX_CNTKCTL_EL1		(CTX_AARCH32_END + U(0x20))
#define CTX_TIMER_SYSREGS_END	(CTX_AARCH32_END + U(0x30)) /* Align to the next 16 byte boundary */
#else
#define CTX_TIMER_SYSREGS_END	CTX_AARCH32_END
#endif /* NS_TIMER_SWITCH */

#if ENABLE_FEAT_MTE2
#define CTX_TFSRE0_EL1		(CTX_TIMER_SYSREGS_END + U(0x0))
#define CTX_TFSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x8))
#define CTX_RGSR_EL1		(CTX_TIMER_SYSREGS_END + U(0x10))
#define CTX_GCR_EL1		(CTX_TIMER_SYSREGS_END + U(0x18))
#define CTX_MTE_REGS_END	(CTX_TIMER_SYSREGS_END + U(0x20)) /* Align to the next 16 byte boundary */
#else
#define CTX_MTE_REGS_END	CTX_TIMER_SYSREGS_END
#endif /* ENABLE_FEAT_MTE2 */

#if ENABLE_FEAT_RAS
#define CTX_DISR_EL1		(CTX_MTE_REGS_END + U(0x0))
#define CTX_RAS_REGS_END	(CTX_MTE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_RAS_REGS_END	CTX_MTE_REGS_END
#endif /* ENABLE_FEAT_RAS */

#if ENABLE_FEAT_S1PIE
#define CTX_PIRE0_EL1		(CTX_RAS_REGS_END + U(0x0))
#define CTX_PIR_EL1		(CTX_RAS_REGS_END + U(0x8))
#define CTX_S1PIE_REGS_END	(CTX_RAS_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_S1PIE_REGS_END	CTX_RAS_REGS_END
#endif /* ENABLE_FEAT_S1PIE */

#if ENABLE_FEAT_S1POE
#define CTX_POR_EL1		(CTX_S1PIE_REGS_END + U(0x0))
#define CTX_S1POE_REGS_END	(CTX_S1PIE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_S1POE_REGS_END	CTX_S1PIE_REGS_END
#endif /* ENABLE_FEAT_S1POE */

#if ENABLE_FEAT_S2POE
#define CTX_S2POR_EL1		(CTX_S1POE_REGS_END + U(0x0))
#define CTX_S2POE_REGS_END	(CTX_S1POE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_S2POE_REGS_END	CTX_S1POE_REGS_END
#endif /* ENABLE_FEAT_S2POE */

#if ENABLE_FEAT_TCR2
#define CTX_TCR2_EL1		(CTX_S2POE_REGS_END + U(0x0))
#define CTX_TCR2_REGS_END	(CTX_S2POE_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_TCR2_REGS_END	CTX_S2POE_REGS_END
#endif /* ENABLE_FEAT_TCR2 */

#if ENABLE_TRF_FOR_NS
#define CTX_TRFCR_EL1		(CTX_TCR2_REGS_END + U(0x0))
#define CTX_TRF_REGS_END	(CTX_TCR2_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_TRF_REGS_END	CTX_TCR2_REGS_END
#endif /* ENABLE_TRF_FOR_NS */

#if ENABLE_FEAT_CSV2_2
#define CTX_SCXTNUM_EL0		(CTX_TRF_REGS_END + U(0x0))
#define CTX_SCXTNUM_EL1		(CTX_TRF_REGS_END + U(0x8))
#define CTX_CSV2_2_REGS_END	(CTX_TRF_REGS_END + U(0x10)) /* Align to the next 16 byte boundary */
#else
#define CTX_CSV2_2_REGS_END	CTX_TRF_REGS_END
#endif /* ENABLE_FEAT_CSV2_2 */

#if ENABLE_FEAT_GCS
#define CTX_GCSCR_EL1		(CTX_CSV2_2_REGS_END + U(0x0))
#define CTX_GCSCRE0_EL1		(CTX_CSV2_2_REGS_END + U(0x8))
#define CTX_GCSPR_EL1		(CTX_CSV2_2_REGS_END + U(0x10))
#define CTX_GCSPR_EL0		(CTX_CSV2_2_REGS_END + U(0x18))
#define CTX_GCS_REGS_END	(CTX_CSV2_2_REGS_END + U(0x20)) /* Align to the next 16 byte boundary */
#else
#define CTX_GCS_REGS_END	CTX_CSV2_2_REGS_END
#endif /* ENABLE_FEAT_GCS */

/*
 * End of EL1 system registers.
 */
#define CTX_EL1_SYSREGS_END	CTX_GCS_REGS_END
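/*
 * Illustrative example (not part of the original header): the feature blocks
 * above chain their offsets, so each *_REGS_END collapses to the previous
 * block's end when the corresponding feature is disabled. For instance, if
 * ENABLE_FEAT_CSV2_2 is the only optional feature enabled (AArch32 and timer
 * registers excluded):
 *
 *	CTX_TRF_REGS_END    == CTX_AARCH64_END           == U(0xc0)
 *	CTX_SCXTNUM_EL0     == CTX_TRF_REGS_END + U(0x0) == U(0xc0)
 *	CTX_SCXTNUM_EL1     == CTX_TRF_REGS_END + U(0x8) == U(0xc8)
 *	CTX_EL1_SYSREGS_END == CTX_CSV2_2_REGS_END       == U(0xd0)
 */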

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'fp_regs'
 * structure at their correct offsets.
 ******************************************************************************/
# define CTX_FPREGS_OFFSET	(CTX_EL1_SYSREGS_OFFSET + CTX_EL1_SYSREGS_END)
#if CTX_INCLUDE_FPREGS
#define CTX_FP_Q0		U(0x0)
#define CTX_FP_Q1		U(0x10)
#define CTX_FP_Q2		U(0x20)
#define CTX_FP_Q3		U(0x30)
#define CTX_FP_Q4		U(0x40)
#define CTX_FP_Q5		U(0x50)
#define CTX_FP_Q6		U(0x60)
#define CTX_FP_Q7		U(0x70)
#define CTX_FP_Q8		U(0x80)
#define CTX_FP_Q9		U(0x90)
#define CTX_FP_Q10		U(0xa0)
#define CTX_FP_Q11		U(0xb0)
#define CTX_FP_Q12		U(0xc0)
#define CTX_FP_Q13		U(0xd0)
#define CTX_FP_Q14		U(0xe0)
#define CTX_FP_Q15		U(0xf0)
#define CTX_FP_Q16		U(0x100)
#define CTX_FP_Q17		U(0x110)
#define CTX_FP_Q18		U(0x120)
#define CTX_FP_Q19		U(0x130)
#define CTX_FP_Q20		U(0x140)
#define CTX_FP_Q21		U(0x150)
#define CTX_FP_Q22		U(0x160)
#define CTX_FP_Q23		U(0x170)
#define CTX_FP_Q24		U(0x180)
#define CTX_FP_Q25		U(0x190)
#define CTX_FP_Q26		U(0x1a0)
#define CTX_FP_Q27		U(0x1b0)
#define CTX_FP_Q28		U(0x1c0)
#define CTX_FP_Q29		U(0x1d0)
#define CTX_FP_Q30		U(0x1e0)
#define CTX_FP_Q31		U(0x1f0)
#define CTX_FP_FPSR		U(0x200)
#define CTX_FP_FPCR		U(0x208)
#if CTX_INCLUDE_AARCH32_REGS
#define CTX_FP_FPEXC32_EL2	U(0x210)
#define CTX_FPREGS_END		U(0x220) /* Align to the next 16 byte boundary */
#else
#define CTX_FPREGS_END		U(0x210) /* Align to the next 16 byte boundary */
#endif /* CTX_INCLUDE_AARCH32_REGS */
#else
#define CTX_FPREGS_END		U(0)
#endif /* CTX_INCLUDE_FPREGS */

/*******************************************************************************
 * Registers related to CVE-2018-3639
 ******************************************************************************/
#define CTX_CVE_2018_3639_OFFSET	(CTX_FPREGS_OFFSET + CTX_FPREGS_END)
#define CTX_CVE_2018_3639_DISABLE	U(0)
#define CTX_CVE_2018_3639_END		U(0x10) /* Align to the next 16 byte boundary */

/*******************************************************************************
 * Registers related to ERRATA_SPECULATIVE_AT
 *
 * This is needed because, with the EL1 and EL2 context registers decoupled,
 * both will not necessarily be present in a given build configuration.
 * Since the ERRATA_SPECULATIVE_AT workaround requires the SCTLR_EL1 and
 * TCR_EL1 registers independently of the above logic, explicit context entries
 * must be reserved for them.
 *
 * NOTE: This results in the following configurations, depending on the
 * presence of the erratum workaround and the inclusion of EL1 or EL2 context.
 *
 * ============================================================================
 * | ERRATA_SPECULATIVE_AT | EL1 context| Memory allocation(SCTLR_EL1,TCR_EL1)|
 * ============================================================================
 * |           0           |     0      |                None                 |
 * |           0           |     1      |       EL1 C-Context structure       |
 * |           1           |     0      |    Errata Context Offset Entries    |
 * |           1           |     1      |    Errata Context Offset Entries    |
 * ============================================================================
 *
 * In the table above, ERRATA_SPECULATIVE_AT=1 with EL1 context=0 implies that
 * only the EL2 context is present, so memory for the SCTLR_EL1 and TCR_EL1
 * registers is reserved explicitly here, under the ERRATA_SPECULATIVE_AT
 * build flag.
 *
 * When EL1 context=1 and ERRATA_SPECULATIVE_AT=1, SCTLR_EL1 and TCR_EL1 are
 * modified by the workaround early in the code flow, before the EL1 context
 * save and restore operations, so context memory is still reserved explicitly
 * under the errata logic here. These registers are not part of the EL1 context
 * save and restore routines.
 *
 * Only when ERRATA_SPECULATIVE_AT=0 and EL1 context=1 are SCTLR_EL1 and
 * TCR_EL1 part of the EL1 context structure (context_el1.h).
 ******************************************************************************/
#define CTX_ERRATA_SPEC_AT_OFFSET	(CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_END)
#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_SCTLR_EL1	U(0x0)
#define CTX_ERRATA_SPEC_AT_TCR_EL1	U(0x8)
#define CTX_ERRATA_SPEC_AT_END		U(0x10) /* Align to the next 16 byte boundary */
#else
#define CTX_ERRATA_SPEC_AT_END		U(0x0)
#endif /* ERRATA_SPECULATIVE_AT */

/*******************************************************************************
 * Registers related to ARMv8.3-PAuth.
 ******************************************************************************/
#define CTX_PAUTH_REGS_OFFSET	(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_END)
#if CTX_INCLUDE_PAUTH_REGS
#define CTX_PACIAKEY_LO		U(0x0)
#define CTX_PACIAKEY_HI		U(0x8)
#define CTX_PACIBKEY_LO		U(0x10)
#define CTX_PACIBKEY_HI		U(0x18)
#define CTX_PACDAKEY_LO		U(0x20)
#define CTX_PACDAKEY_HI		U(0x28)
#define CTX_PACDBKEY_LO		U(0x30)
#define CTX_PACDBKEY_HI		U(0x38)
#define CTX_PACGAKEY_LO		U(0x40)
#define CTX_PACGAKEY_HI		U(0x48)
#define CTX_PAUTH_REGS_END	U(0x50) /* Align to the next 16 byte boundary */
#else
#define CTX_PAUTH_REGS_END	U(0)
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*******************************************************************************
 * Registers initialised in a per-world context.
 ******************************************************************************/
#define CTX_CPTR_EL3			U(0x0)
#define CTX_ZCR_EL3			U(0x8)
#define CTX_MPAM3_EL3			U(0x10)
#define CTX_PERWORLD_EL3STATE_END	U(0x18)

#ifndef __ASSEMBLER__

#include <stdint.h>

#include <lib/cassert.h>

/*
 * Common constants to help define the 'cpu_context' structure and its
 * members below.
 */
#define DWORD_SHIFT		U(3)
#define DEFINE_REG_STRUCT(name, num_regs)	\
	typedef struct name {			\
		uint64_t ctx_regs[num_regs];	\
	} __aligned(16) name##_t
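
/*
 * For illustration (not part of the original header):
 * DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL) expands to
 *
 *	typedef struct gp_regs {
 *		uint64_t ctx_regs[CTX_GPREG_ALL];
 *	} __aligned(16) gp_regs_t;
 *
 * i.e. each context block is a 16-byte aligned array of 64-bit slots whose
 * length is derived from the corresponding *_END offset constant below.
 */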

/* Constants to determine the size of individual context structures */
#define CTX_GPREG_ALL		(CTX_GPREGS_END >> DWORD_SHIFT)
#define CTX_EL1_SYSREGS_ALL	(CTX_EL1_SYSREGS_END >> DWORD_SHIFT)

#if CTX_INCLUDE_FPREGS
# define CTX_FPREG_ALL		(CTX_FPREGS_END >> DWORD_SHIFT)
#endif
#define CTX_EL3STATE_ALL	(CTX_EL3STATE_END >> DWORD_SHIFT)
#define CTX_CVE_2018_3639_ALL	(CTX_CVE_2018_3639_END >> DWORD_SHIFT)

#if ERRATA_SPECULATIVE_AT
#define CTX_ERRATA_SPEC_AT_ALL	(CTX_ERRATA_SPEC_AT_END >> DWORD_SHIFT)
#endif
#if CTX_INCLUDE_PAUTH_REGS
# define CTX_PAUTH_REGS_ALL	(CTX_PAUTH_REGS_END >> DWORD_SHIFT)
#endif

/*
 * AArch64 general purpose register context structure. Usually only x0-x18 and
 * lr are saved, as the compiler is expected to preserve any callee-saved
 * registers it uses in the C runtime and the assembler does not touch the
 * rest. However, in case of a world switch during exception handling, the
 * callee-saved registers need to be saved as well.
 */
DEFINE_REG_STRUCT(gp_regs, CTX_GPREG_ALL);

/*
 * AArch64 EL1 system register context structure for preserving the
 * architectural state during world switches.
 */
DEFINE_REG_STRUCT(el1_sysregs, CTX_EL1_SYSREGS_ALL);

/*
 * AArch64 floating point register context structure for preserving
 * the floating point state during switches from one security state to
 * another.
 */
#if CTX_INCLUDE_FPREGS
DEFINE_REG_STRUCT(fp_regs, CTX_FPREG_ALL);
#endif

/*
 * Miscellaneous registers used by EL3 firmware to maintain its state
 * across exception entries and exits.
 */
DEFINE_REG_STRUCT(el3_state, CTX_EL3STATE_ALL);

/* Function pointer used by CVE-2018-3639 dynamic mitigation */
DEFINE_REG_STRUCT(cve_2018_3639, CTX_CVE_2018_3639_ALL);

/* Registers associated with the ERRATA_SPECULATIVE_AT workaround */
#if ERRATA_SPECULATIVE_AT
DEFINE_REG_STRUCT(errata_speculative_at, CTX_ERRATA_SPEC_AT_ALL);
#endif

/* Registers associated with ARMv8.3-PAuth */
#if CTX_INCLUDE_PAUTH_REGS
DEFINE_REG_STRUCT(pauth, CTX_PAUTH_REGS_ALL);
#endif

/*
 * Macros to access members of any of the above structures using their
 * offsets
 */
#define read_ctx_reg(ctx, offset)	((ctx)->ctx_regs[(offset) >> DWORD_SHIFT])
#define write_ctx_reg(ctx, offset, val)	(((ctx)->ctx_regs[(offset) >> DWORD_SHIFT]) \
					 = (uint64_t) (val))
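
/*
 * Illustrative sketch (not part of the original header): C code typically
 * pairs these macros with the get_*_ctx() accessors defined further below,
 * e.g.
 *
 *	cpu_context_t *ctx = cm_get_context(NON_SECURE);   // assumed helper
 *	uint64_t scr = read_ctx_reg(get_el3state_ctx(ctx), CTX_SCR_EL3);
 *	write_ctx_reg(get_el3state_ctx(ctx), CTX_SCR_EL3, scr | SCR_NS_BIT);
 *
 * cm_get_context() and SCR_NS_BIT are assumed to come from the context
 * management library and the arch headers respectively.
 */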

/*
 * Top-level context structure which is used by EL3 firmware to preserve
 * the state of a core at the next lower EL in a given security state and
 * save enough EL3 metadata to be able to return to that EL and security
 * state. The context management library will be used to ensure that
 * SP_EL3 always points to an instance of this structure at exception
 * entry and exit.
 */
typedef struct cpu_context {
	gp_regs_t gpregs_ctx;
	el3_state_t el3state_ctx;
	el1_sysregs_t el1_sysregs_ctx;

#if CTX_INCLUDE_FPREGS
	fp_regs_t fpregs_ctx;
#endif
	cve_2018_3639_t cve_2018_3639_ctx;

#if ERRATA_SPECULATIVE_AT
	errata_speculative_at_t errata_speculative_at_ctx;
#endif

#if CTX_INCLUDE_PAUTH_REGS
	pauth_t pauth_ctx;
#endif

#if CTX_INCLUDE_EL2_REGS
	el2_sysregs_t el2_sysregs_ctx;
#endif

} cpu_context_t;

/*
 * Per-World Context.
 * It stores registers whose values can be shared across CPUs.
 */
typedef struct per_world_context {
	uint64_t ctx_cptr_el3;
	uint64_t ctx_zcr_el3;
	uint64_t ctx_mpam3_el3;
} per_world_context_t;

extern per_world_context_t per_world_context[CPU_DATA_CONTEXT_NUM];
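
/*
 * Illustrative sketch (not part of the original header): the per-world array
 * is expected to be indexed by the security-state indices from cpu_data.h
 * (assumed names CPU_CONTEXT_SECURE / CPU_CONTEXT_NS), e.g.
 *
 *	per_world_context[CPU_CONTEXT_NS].ctx_cptr_el3 = read_cptr_el3();
 *
 * read_cptr_el3() is the usual system register accessor assumed to be
 * provided by arch_helpers.h.
 */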

/* Macros to access members of the 'cpu_context_t' structure */
#define get_el3state_ctx(h)	(&((cpu_context_t *) h)->el3state_ctx)
#if CTX_INCLUDE_FPREGS
# define get_fpregs_ctx(h)	(&((cpu_context_t *) h)->fpregs_ctx)
#endif
#define get_el1_sysregs_ctx(h)	(&((cpu_context_t *) h)->el1_sysregs_ctx)
#if CTX_INCLUDE_EL2_REGS
# define get_el2_sysregs_ctx(h)	(&((cpu_context_t *) h)->el2_sysregs_ctx)
#endif
#define get_gpregs_ctx(h)	(&((cpu_context_t *) h)->gpregs_ctx)
#define get_cve_2018_3639_ctx(h)	(&((cpu_context_t *) h)->cve_2018_3639_ctx)

#if ERRATA_SPECULATIVE_AT
#define get_errata_speculative_at_ctx(h)	(&((cpu_context_t *) h)->errata_speculative_at_ctx)
#endif

#if CTX_INCLUDE_PAUTH_REGS
# define get_pauth_ctx(h)	(&((cpu_context_t *) h)->pauth_ctx)
#endif

/*
 * Compile time assertions related to the 'cpu_context' structure to
 * ensure that the assembler and the compiler view of the offsets of
 * the structure members is the same.
 */
CASSERT(CTX_GPREGS_OFFSET == __builtin_offsetof(cpu_context_t, gpregs_ctx),
	assert_core_context_gp_offset_mismatch);

CASSERT(CTX_EL3STATE_OFFSET == __builtin_offsetof(cpu_context_t, el3state_ctx),
	assert_core_context_el3state_offset_mismatch);

CASSERT(CTX_EL1_SYSREGS_OFFSET == __builtin_offsetof(cpu_context_t, el1_sysregs_ctx),
	assert_core_context_el1_sys_offset_mismatch);

#if CTX_INCLUDE_FPREGS
CASSERT(CTX_FPREGS_OFFSET == __builtin_offsetof(cpu_context_t, fpregs_ctx),
	assert_core_context_fp_offset_mismatch);
#endif /* CTX_INCLUDE_FPREGS */

CASSERT(CTX_CVE_2018_3639_OFFSET == __builtin_offsetof(cpu_context_t, cve_2018_3639_ctx),
	assert_core_context_cve_2018_3639_offset_mismatch);

#if ERRATA_SPECULATIVE_AT
CASSERT(CTX_ERRATA_SPEC_AT_OFFSET == __builtin_offsetof(cpu_context_t, errata_speculative_at_ctx),
	assert_core_context_errata_speculative_at_offset_mismatch);
#endif

#if CTX_INCLUDE_PAUTH_REGS
CASSERT(CTX_PAUTH_REGS_OFFSET == __builtin_offsetof(cpu_context_t, pauth_ctx),
	assert_core_context_pauth_offset_mismatch);
#endif /* CTX_INCLUDE_PAUTH_REGS */

/*
 * Helper macros to set the general purpose registers that correspond to
 * parameters in an AAPCS64 call, i.e. x0-x7.
 */
#define set_aapcs_args0(ctx, x0) do {					\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X0, x0);	\
	} while (0)
#define set_aapcs_args1(ctx, x0, x1) do {				\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X1, x1);	\
		set_aapcs_args0(ctx, x0);				\
	} while (0)
#define set_aapcs_args2(ctx, x0, x1, x2) do {				\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X2, x2);	\
		set_aapcs_args1(ctx, x0, x1);				\
	} while (0)
#define set_aapcs_args3(ctx, x0, x1, x2, x3) do {			\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X3, x3);	\
		set_aapcs_args2(ctx, x0, x1, x2);			\
	} while (0)
#define set_aapcs_args4(ctx, x0, x1, x2, x3, x4) do {			\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X4, x4);	\
		set_aapcs_args3(ctx, x0, x1, x2, x3);			\
	} while (0)
#define set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5) do {		\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X5, x5);	\
		set_aapcs_args4(ctx, x0, x1, x2, x3, x4);		\
	} while (0)
#define set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6) do {		\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X6, x6);	\
		set_aapcs_args5(ctx, x0, x1, x2, x3, x4, x5);		\
	} while (0)
#define set_aapcs_args7(ctx, x0, x1, x2, x3, x4, x5, x6, x7) do {	\
		write_ctx_reg(get_gpregs_ctx(ctx), CTX_GPREG_X7, x7);	\
		set_aapcs_args6(ctx, x0, x1, x2, x3, x4, x5, x6);	\
	} while (0)
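
/*
 * Illustrative sketch (not part of the original header): a service returning
 * a result to the lower EL might populate the saved argument registers as
 *
 *	set_aapcs_args1(ctx, SMC_OK, result);	// hypothetical values
 *
 * before restoring 'ctx' and performing the exception return.
 */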

/*******************************************************************************
 * Function prototypes
 ******************************************************************************/
#if CTX_INCLUDE_FPREGS
void fpregs_context_save(fp_regs_t *regs);
void fpregs_context_restore(fp_regs_t *regs);
#endif

#endif /* __ASSEMBLER__ */

#endif /* CONTEXT_H */