/*
 * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

	.globl cortex_a76_reset_func
	.globl cortex_a76_core_pwr_dwn
	.globl cortex_a76_disable_wa_cve_2018_3639

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
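
/*
 * Editorial note (derivation, not from the original source): in the
 * ESR_EL3 encoding, bits [31:26] hold the exception class (0x17 = SMC
 * from AArch64, 0x13 = SMC from AArch32) and bit [25] is the IL bit,
 * which yields the 0x5e000000/0x4e000000 constants above. The remaining
 * ISS bits are zero, which for an AArch64 SMC corresponds to an
 * immediate of #0.
 */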

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * the x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the variant 4 (CVE-2018-3639) mitigation during
	 * EL3 execution. This is not required for the fast path above
	 * because it does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
vector_base cortex_a76_wa_cve_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

#if WORKAROUND_CVE_2022_23960
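	/*
	 * Editorial note: apply_cve_2022_23960_bhb_wa (defined in
	 * wa_cve_2022_23960_bhb.S) is understood to mitigate Spectre-BHB
	 * by executing a loop of taken branches that overwrites the branch
	 * history; CORTEX_A76_BHB_LOOP_COUNT (from cortex_a76.h) is the
	 * loop count chosen for this core.
	 */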
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 will go through the fast path and return early.
	 *
	 * In the fast path the x0-x3 registers do not need to be restored
	 * as the calling context will have saved them.
	 *
	 * The caller must pass the expected ESR_EL3 value to compare
	 * against in x2. Save and restore these registers outside of this
	 * function from the context before jumping to the main runtime
	 * vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC came from AArch64/AArch32 state with immediate #0
	 * and with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 * (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * x2 is populated outside this function with the expected
	 * ESR_EL3 value.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

	ccmp	w2, w3, #0, eq
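	/*
	 * Editorial note: with the `eq` condition, ccmp compares w2 (the
	 * expected syndrome) with w3 (the live ESR_EL3) only when the cmp
	 * above matched SMCCC_ARCH_WORKAROUND_2; otherwise it loads the
	 * immediate #0 into NZCV, which leaves the NE condition set, so
	 * the single bne below covers both checks.
	 */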
	/*
	 * The static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants the mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1073348.
	 * This applies only to revisions <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

func check_errata_1073348
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348
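
	/*
	 * Editorial note on the check_errata_* helpers: cpu_get_rev_var
	 * packs the MIDR fields as (variant << 4) | revision, so r1p0
	 * encodes as 0x10 and r4p1 as 0x41. cpu_rev_var_ls (in
	 * cpu_helpers.S) takes that value in x0 and an upper bound in x1
	 * and returns ERRATA_APPLIES in x0 when x0 <= x1, else
	 * ERRATA_NOT_APPLIES. An equivalent open-coded check would look
	 * roughly like this (illustrative sketch only):
	 *
	 *	cmp	x0, x1
	 *	mov	x0, #ERRATA_APPLIES
	 *	mov	x1, #ERRATA_NOT_APPLIES
	 *	csel	x0, x0, x1, ls
	 *	ret
	 */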

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1130799.
	 * This applies only to revisions <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1220197.
	 * This applies only to revisions <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1257314.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1257314_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1257314
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1257314_wa

func check_errata_1257314
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1257314

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262888.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1262888_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1262888
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1262888_wa

func check_errata_1262888
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262888

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1286807.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Due to the nature of the erratum, the workaround is applied
	 * unconditionally when built in, so it is reported as applicable
	 * in this case.
	 * ---------------------------------------------------
	 */
func check_errata_1286807
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1286807

	/* --------------------------------------------------
	 * Errata workaround for Cortex A76 Errata #1791580.
	 * This applies to revisions <= r4p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1791580_wa
	/* Compare x0 against revision r4p0 */
	mov	x17, x30
	bl	check_errata_1791580
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1791580_wa

func check_errata_1791580
	/* Applies to everything <= r4p0. */
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1791580

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262606,
	 * #1275112, and #1868343. #1262606 and #1275112
	 * apply to revisions <= r3p0 and #1868343 applies to
	 * revisions <= r4p0.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */

func errata_a76_1262606_1275112_1868343_wa
	mov	x17, x30

	/* Check for <= r3p0 cases and branch if check passes. */
#if ERRATA_A76_1262606 || ERRATA_A76_1275112
	bl	check_errata_1262606
	cbnz	x0, 1f
#endif

	/* Check for <= r4p0 cases and branch if check fails. */
#if ERRATA_A76_1868343
	bl	check_errata_1868343
	cbz	x0, 2f
#endif
1:
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
2:
	ret	x17
endfunc errata_a76_1262606_1275112_1868343_wa

func check_errata_1262606
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262606

func check_errata_1275112
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1275112

func check_errata_1868343
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1868343

	/* --------------------------------------------------
	 * Errata Workaround for A76 Erratum 1946160.
	 * This applies to revisions r3p0 - r4p1 of A76.
	 * It also exists in r0p0 - r2p0 but there is no fix
	 * in those revisions.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1946160_wa
	/* Compare x0 against revisions r3p0 - r4p1 */
	mov	x17, x30
	bl	check_errata_1946160
	cbz	x0, 1f

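	/*
	 * Editorial note: the three blocks below appear to program the
	 * implementation-defined instruction-patching registers
	 * (S3_6_C15_C8_0 selecting a patch slot, with S3_6_C15_C8_1/2/3
	 * holding its control, opcode and mask values). The constants are
	 * as published in the workaround description for erratum 1946160.
	 */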
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	isb
1:
	ret	x17
endfunc errata_a76_1946160_wa

func check_errata_1946160
	/* Applies to revisions r3p0 - r4p1. */
	mov	x1, #0x30
	mov	x2, #0x41
	b	cpu_rev_var_range
endfunc check_errata_1946160

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* --------------------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1165522.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Due to the nature of the erratum, the workaround is applied
	 * unconditionally when built in, so it is reported as applicable
	 * in this case.
	 * --------------------------------------------------------------
	 */
func check_errata_1165522
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1165522

func check_errata_cve_2022_23960
#if WORKAROUND_CVE_2022_23960
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif /* WORKAROUND_CVE_2022_23960 */
	ret
endfunc check_errata_cve_2022_23960

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0
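	/*
	 * x19 preserves the return address and x18 caches the packed
	 * revision-variant value; x18 is copied back into x0 before each
	 * erratum workaround call below, since the workarounds may
	 * clobber x0-x17.
	 */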

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
	mov	x0, x18
	bl	errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112 || ERRATA_A76_1868343
	mov	x0, x18
	bl	errata_a76_1262606_1275112_1868343_wa
#endif

#if ERRATA_A76_1262888
	mov	x0, x18
	bl	errata_a76_1262888_wa
#endif

#if ERRATA_A76_1791580
	mov	x0, x18
	bl	errata_a76_1791580_wa
#endif

#if ERRATA_A76_1946160
	mov	x0, x18
	bl	errata_a76_1946160_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
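	/*
	 * A non-zero ID_AA64PFR1_EL1.SSBS field indicates the PE
	 * implements FEAT_SSBS, letting software control speculative
	 * store bypass directly, so the CPUACTLR2-based workaround and
	 * its dynamic SMC interface are not needed.
	 */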
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * for CVE-2018-3639 on exception entry from lower ELs. Once this
	 * vector table has been installed, skip installing it again for
	 * CVE-2022-23960, as both mitigations share the same vbar.
	 */
	adr	x0, cortex_a76_wa_cve_vbar
	msr	vbar_el3, x0
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply the errata
	 * mitigation on exception entry from lower ELs. This is bypassed if
	 * DYNAMIC_WORKAROUND_CVE_2018_3639 has already installed the vectors.
	 */
	adr	x0, cortex_a76_wa_cve_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable the CPU power down bit in the power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking function of each erratum.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata ERRATA_A76_1257314, cortex_a76, 1257314
	report_errata ERRATA_A76_1262606, cortex_a76, 1262606
	report_errata ERRATA_A76_1262888, cortex_a76, 1262888
	report_errata ERRATA_A76_1275112, cortex_a76, 1275112
	report_errata ERRATA_A76_1286807, cortex_a76, 1286807
	report_errata ERRATA_A76_1791580, cortex_a76, 1791580
	report_errata ERRATA_A76_1165522, cortex_a76, 1165522
	report_errata ERRATA_A76_1868343, cortex_a76, 1868343
	report_errata ERRATA_A76_1946160, cortex_a76, 1946160
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184
	report_errata WORKAROUND_CVE_2022_23960, cortex_a76, cve_2022_23960

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides Cortex-A76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

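/*
 * Editorial note: declare_cpu_ops_wa (from cpu_macros.S) registers, in
 * addition to the reset and power-down handlers, per-CVE "extra" hooks
 * that back the SMCCC_ARCH_WORKAROUND_* queries; here only the
 * CVE-2018-3639 disable hook is provided, and the remaining slots are
 * left empty via CPU_NO_EXTRA1_FUNC/CPU_NO_EXTRA3_FUNC.
 */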
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn