/*
 * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif
	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
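/*
 * The two values above are the ESR_EL3 syndromes for an SMC #0 trapped from
 * AArch64 (EC = 0x17) and AArch32 (EC = 0x13) respectively, with the IL bit
 * set. They are matched against ESR_EL3 in the CVE-2018-3639 fast path below.
 */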

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * the x0-x3 registers do not need to be restored as the
	 * calling context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the variant 4 (CVE-2018-3639) mitigation during EL3
	 * execution. This is not required for the fast path above because it
	 * does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar
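	/*
	 * The lower EL entries in this table apply the mitigations enabled at
	 * build time and then branch to the corresponding entry in the
	 * generic runtime vector table; the current EL entries branch
	 * straight through.
	 */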

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 will go through the fast path and return early.
	 *
	 * In the fast path the x0-x3 registers do not need to be restored
	 * as the calling context will have saved them.
	 *
	 * The caller must pass the esr_el3 value to compare against in x2.
	 * The registers used here are saved to and restored from the context
	 * outside of this function, before jumping to the main runtime
	 * vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure that the SMC came from A64/A32 state via SMC #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *	(W0 == SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3 == SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * X2 is populated outside this function with the expected ESR_EL3
	 * value.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
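	/*
	 * ccmp compares w2 (the expected ESR_EL3 value) with w3 only if the
	 * preceding compare was EQ; otherwise it forces NZCV to #0 (i.e. NE).
	 * Both checks therefore collapse into the single bne below.
	 */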
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */
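	/*
	 * Per the SMCCC_ARCH_WORKAROUND_2 contract, w1 == 0 requests that the
	 * mitigation be disabled for the lower EL and a non-zero w1 requests
	 * that it be enabled; the EQ condition below therefore selects the
	 * "disable" values.
	 */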

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

workaround_reset_start cortex_a76, ERRATUM(1073348), ERRATA_A76_1073348
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
workaround_reset_end cortex_a76, ERRATUM(1073348)

check_erratum_ls cortex_a76, ERRATUM(1073348), CPU_REV(1, 0)

workaround_reset_start cortex_a76, ERRATUM(1130799), ERRATA_A76_1130799
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_59
workaround_reset_end cortex_a76, ERRATUM(1130799)

check_erratum_ls cortex_a76, ERRATUM(1130799), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1220197), ERRATA_A76_1220197
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
workaround_reset_end cortex_a76, ERRATUM(1220197)

check_erratum_ls cortex_a76, ERRATUM(1220197), CPU_REV(2, 0)

workaround_reset_start cortex_a76, ERRATUM(1257314), ERRATA_A76_1257314
	sysreg_bit_set CORTEX_A76_CPUACTLR3_EL1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
workaround_reset_end cortex_a76, ERRATUM(1257314)

check_erratum_ls cortex_a76, ERRATUM(1257314), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262606), ERRATA_A76_1262606
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1262606)

check_erratum_ls cortex_a76, ERRATUM(1262606), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1262888), ERRATA_A76_1262888
	sysreg_bit_set CORTEX_A76_CPUECTLR_EL1, CORTEX_A76_CPUECTLR_EL1_BIT_51
workaround_reset_end cortex_a76, ERRATUM(1262888)

check_erratum_ls cortex_a76, ERRATUM(1262888), CPU_REV(3, 0)

workaround_reset_start cortex_a76, ERRATUM(1275112), ERRATA_A76_1275112
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1275112)

check_erratum_ls cortex_a76, ERRATUM(1275112), CPU_REV(3, 0)

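/* --------------------------------------------------------------
 * Erratum 1286807: when the workaround is built in
 * (ERRATA_A76_1286807), the erratum is reported as applicable
 * unconditionally; otherwise revisions <= r3p0 (rev_var 0x30)
 * are checked.
 * --------------------------------------------------------------
 */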
check_erratum_custom_start cortex_a76, ERRATUM(1286807)
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1286807)

workaround_reset_start cortex_a76, ERRATUM(1791580), ERRATA_A76_1791580
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
workaround_reset_end cortex_a76, ERRATUM(1791580)

check_erratum_ls cortex_a76, ERRATUM(1791580), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1868343), ERRATA_A76_1868343
	sysreg_bit_set CORTEX_A76_CPUACTLR_EL1, CORTEX_A76_CPUACTLR_EL1_BIT_13
workaround_reset_end cortex_a76, ERRATUM(1868343)

check_erratum_ls cortex_a76, ERRATUM(1868343), CPU_REV(4, 0)

workaround_reset_start cortex_a76, ERRATUM(1946160), ERRATA_A76_1946160
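	/*
	 * Program the IMPLEMENTATION DEFINED S3_6_C15_C8_* registers with
	 * three sets of magic values. The encodings are not architecturally
	 * defined; they are taken from the published workaround for erratum
	 * 1946160.
	 */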
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0
workaround_reset_end cortex_a76, ERRATUM(1946160)

check_erratum_range cortex_a76, ERRATUM(1946160), CPU_REV(3, 0), CPU_REV(4, 1)

workaround_runtime_start cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102
	/* dsb before isb of power down sequence */
	dsb	sy
workaround_runtime_end cortex_a76, ERRATUM(2743102)

check_erratum_ls cortex_a76, ERRATUM(2743102), CPU_REV(4, 1)

check_erratum_chosen cortex_a76, CVE(2018, 3639), WORKAROUND_CVE_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	sysreg_bit_clear CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

/* --------------------------------------------------------------
 * Errata workaround for Cortex-A76 erratum 1165522.
 * This applies only to revisions <= r3p0 of Cortex-A76.
 * Due to the nature of the erratum it is applied unconditionally
 * when built in; in that case it is reported as applicable.
 * --------------------------------------------------------------
 */
check_erratum_custom_start cortex_a76, ERRATUM(1165522)
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
check_erratum_custom_end cortex_a76, ERRATUM(1165522)

check_erratum_chosen cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960

/* This erratum has no workaround in the CPU; generic code must take care of it. */
add_erratum_entry cortex_a76, CVE(2022, 23960), WORKAROUND_CVE_2022_23960, NO_APPLY_AT_RESET

/* ERRATA_DSU_798953:
 * The erratum is defined in dsu_helpers.S but applies to cortex_a76 as well.
 * Hence, symbolic names are created for the existing workaround functions so
 * that they get registered under the errata framework.
 */
.equ check_erratum_cortex_a76_798953, check_errata_dsu_798953
.equ erratum_cortex_a76_798953_wa, errata_dsu_798953_wa
add_erratum_entry cortex_a76, ERRATUM(798953), ERRATA_DSU_798953, APPLY_AT_RESET

/* ERRATA_DSU_936184:
 * The erratum is defined in dsu_helpers.S but applies to cortex_a76 as well.
 * Hence, symbolic names are created for the existing workaround functions so
 * that they get registered under the errata framework.
 */
.equ check_erratum_cortex_a76_936184, check_errata_dsu_936184
.equ erratum_cortex_a76_936184_wa, errata_dsu_936184_wa
add_erratum_entry cortex_a76, ERRATUM(936184), ERRATA_DSU_936184, APPLY_AT_RESET

cpu_reset_func_start cortex_a76

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
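	/* x0 is now non-zero if and only if SSBS is implemented */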
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	sysreg_bit_set CORTEX_A76_CPUACTLR2_EL1, CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * for CVE-2018-3639 on exception entry from lower ELs.
	 * Once this vector table has been installed, the override for
	 * CVE-2022-23960 below is skipped, as both use the same vbar.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply the errata
	 * mitigation on exception entry from lower ELs. This is skipped if
	 * DYNAMIC_WORKAROUND_CVE_2018_3639 has already installed the same
	 * vectors above.
	 */
	override_vector_table cortex_a76_wa_cve_vbar
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:
cpu_reset_func_end cortex_a76

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	sysreg_bit_set CORTEX_A76_CPUPWRCTLR_EL1, CORTEX_A76_CORE_PWRDN_EN_MASK

	apply_erratum cortex_a76, ERRATUM(2743102), ERRATA_A76_2743102

	isb
	ret
endfunc cortex_a76_core_pwr_dwn

errata_report_shim cortex_a76

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:	/* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn