/*
 * Copyright (c) 2017-2022, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"
John Tsichritzisfe6df392019-03-19 17:20:52 +000016/* Hardware handled coherency */
17#if HW_ASSISTED_COHERENCY == 0
18#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
19#endif
Saurabh Gorechab8493012022-04-05 00:11:52 +053020 .globl cortex_a76_reset_func
21 .globl cortex_a76_core_pwr_dwn
22 .globl cortex_a76_disable_wa_cve_2018_3639
John Tsichritzisfe6df392019-03-19 17:20:52 +000023
John Tsichritzis7557c662019-06-03 13:54:30 +010024/* 64-bit only core */
25#if CTX_INCLUDE_AARCH32_REGS == 1
26#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
27#endif
28
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010029#define ESR_EL3_A64_SMC0 0x5e000000
30#define ESR_EL3_A32_SMC0 0x4e000000
31
Ambroise Vincent8c6fdf82019-03-07 14:33:02 +000032#if DYNAMIC_WORKAROUND_CVE_2018_3639
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010033 /*
34 * This macro applies the mitigation for CVE-2018-3639.
Ambroise Vincent6dbbe432019-03-07 14:31:33 +000035 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010036 * SMC calls from a lower EL running in AArch32 or AArch64
37 * will go through the fast and return early.
38 *
Ambroise Vincent6dbbe432019-03-07 14:31:33 +000039 * The macro saves x2-x3 to the context. In the fast path
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010040 * x0-x3 registers do not need to be restored as the calling
Bipin Ravif267b372022-02-02 23:03:28 -060041 * context will have saved them. The macro also saves
42 * x29-x30 to the context in the sync_exception path.
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010043 */
44 .macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
45 stp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010046 .if \_is_sync_exception
Bipin Ravif267b372022-02-02 23:03:28 -060047 stp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
48 mov_imm w2, \_esr_el3_val
49 bl apply_cve_2018_3639_sync_wa
50 ldp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010051 .endif
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010052 /*
Ambroise Vincent6dbbe432019-03-07 14:31:33 +000053 * Always enable v4 mitigation during EL3 execution. This is not
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010054 * required for the fast path above because it does not perform any
55 * memory loads.
56 */
57 mrs x2, CORTEX_A76_CPUACTLR2_EL1
58 orr x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
59 msr CORTEX_A76_CPUACTLR2_EL1, x2
60 isb
61
62 /*
63 * The caller may have passed arguments to EL3 via x2-x3.
64 * Restore these registers from the context before jumping to the
65 * main runtime vector table entry.
66 */
67 ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
68 .endm
Bipin Raviee56a8a2022-02-08 19:32:38 -060069#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010070
Bipin Raviee56a8a2022-02-08 19:32:38 -060071#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
72vector_base cortex_a76_wa_cve_vbar
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010073
74 /* ---------------------------------------------------------------------
75 * Current EL with SP_EL0 : 0x0 - 0x200
76 * ---------------------------------------------------------------------
77 */
78vector_entry cortex_a76_sync_exception_sp_el0
79 b sync_exception_sp_el0
Roberto Vargas95f30ab2018-04-17 11:31:43 +010080end_vector_entry cortex_a76_sync_exception_sp_el0
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010081
82vector_entry cortex_a76_irq_sp_el0
83 b irq_sp_el0
Roberto Vargas95f30ab2018-04-17 11:31:43 +010084end_vector_entry cortex_a76_irq_sp_el0
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010085
86vector_entry cortex_a76_fiq_sp_el0
87 b fiq_sp_el0
Roberto Vargas95f30ab2018-04-17 11:31:43 +010088end_vector_entry cortex_a76_fiq_sp_el0
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010089
90vector_entry cortex_a76_serror_sp_el0
91 b serror_sp_el0
Roberto Vargas95f30ab2018-04-17 11:31:43 +010092end_vector_entry cortex_a76_serror_sp_el0
Dimitris Papastamos312e17e2018-05-16 09:59:54 +010093
94 /* ---------------------------------------------------------------------
95 * Current EL with SP_ELx: 0x200 - 0x400
96 * ---------------------------------------------------------------------
97 */
98vector_entry cortex_a76_sync_exception_sp_elx
99 b sync_exception_sp_elx
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100100end_vector_entry cortex_a76_sync_exception_sp_elx
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100101
102vector_entry cortex_a76_irq_sp_elx
103 b irq_sp_elx
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100104end_vector_entry cortex_a76_irq_sp_elx
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100105
106vector_entry cortex_a76_fiq_sp_elx
107 b fiq_sp_elx
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100108end_vector_entry cortex_a76_fiq_sp_elx
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100109
110vector_entry cortex_a76_serror_sp_elx
111 b serror_sp_elx
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100112end_vector_entry cortex_a76_serror_sp_elx
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100113
114 /* ---------------------------------------------------------------------
115 * Lower EL using AArch64 : 0x400 - 0x600
116 * ---------------------------------------------------------------------
117 */
118vector_entry cortex_a76_sync_exception_aarch64
Bipin Raviee56a8a2022-02-08 19:32:38 -0600119
120#if WORKAROUND_CVE_2022_23960
121 apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
122#endif /* WORKAROUND_CVE_2022_23960 */
123
124#if DYNAMIC_WORKAROUND_CVE_2018_3639
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100125 apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
Bipin Raviee56a8a2022-02-08 19:32:38 -0600126#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/
127
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100128 b sync_exception_aarch64
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100129end_vector_entry cortex_a76_sync_exception_aarch64
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100130
131vector_entry cortex_a76_irq_aarch64
Bipin Raviee56a8a2022-02-08 19:32:38 -0600132
133#if WORKAROUND_CVE_2022_23960
134 apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
135#endif /* WORKAROUND_CVE_2022_23960 */
136
137#if DYNAMIC_WORKAROUND_CVE_2018_3639
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100138 apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
Bipin Raviee56a8a2022-02-08 19:32:38 -0600139#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/
140
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100141 b irq_aarch64
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100142end_vector_entry cortex_a76_irq_aarch64
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100143
144vector_entry cortex_a76_fiq_aarch64
Bipin Raviee56a8a2022-02-08 19:32:38 -0600145
146#if WORKAROUND_CVE_2022_23960
147 apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
148#endif /* WORKAROUND_CVE_2022_23960 */
149
150#if DYNAMIC_WORKAROUND_CVE_2018_3639
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100151 apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
Bipin Raviee56a8a2022-02-08 19:32:38 -0600152#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/
153
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100154 b fiq_aarch64
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100155end_vector_entry cortex_a76_fiq_aarch64
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100156
157vector_entry cortex_a76_serror_aarch64
Bipin Raviee56a8a2022-02-08 19:32:38 -0600158
159#if WORKAROUND_CVE_2022_23960
160 apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
161#endif /* WORKAROUND_CVE_2022_23960 */
162
163#if DYNAMIC_WORKAROUND_CVE_2018_3639
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100164 apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
Bipin Raviee56a8a2022-02-08 19:32:38 -0600165#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/
166
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100167 b serror_aarch64
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100168end_vector_entry cortex_a76_serror_aarch64
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100169
170 /* ---------------------------------------------------------------------
171 * Lower EL using AArch32 : 0x600 - 0x800
172 * ---------------------------------------------------------------------
173 */
174vector_entry cortex_a76_sync_exception_aarch32
Bipin Raviee56a8a2022-02-08 19:32:38 -0600175
176#if WORKAROUND_CVE_2022_23960
177 apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
178#endif /* WORKAROUND_CVE_2022_23960 */
179
180#if DYNAMIC_WORKAROUND_CVE_2018_3639
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100181 apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
Bipin Raviee56a8a2022-02-08 19:32:38 -0600182#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/
183
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100184 b sync_exception_aarch32
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100185end_vector_entry cortex_a76_sync_exception_aarch32
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100186
187vector_entry cortex_a76_irq_aarch32
Bipin Raviee56a8a2022-02-08 19:32:38 -0600188
189#if WORKAROUND_CVE_2022_23960
190 apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
191#endif /* WORKAROUND_CVE_2022_23960 */
192
193#if DYNAMIC_WORKAROUND_CVE_2018_3639
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100194 apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
Bipin Raviee56a8a2022-02-08 19:32:38 -0600195#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/
196
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100197 b irq_aarch32
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100198end_vector_entry cortex_a76_irq_aarch32
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100199
200vector_entry cortex_a76_fiq_aarch32
Bipin Raviee56a8a2022-02-08 19:32:38 -0600201
202#if WORKAROUND_CVE_2022_23960
203 apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
204#endif /* WORKAROUND_CVE_2022_23960 */
205
206#if DYNAMIC_WORKAROUND_CVE_2018_3639
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100207 apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
Bipin Raviee56a8a2022-02-08 19:32:38 -0600208#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/
209
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100210 b fiq_aarch32
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100211end_vector_entry cortex_a76_fiq_aarch32
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100212
213vector_entry cortex_a76_serror_aarch32
Bipin Raviee56a8a2022-02-08 19:32:38 -0600214
215#if WORKAROUND_CVE_2022_23960
216 apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
217#endif /* WORKAROUND_CVE_2022_23960 */
218
219#if DYNAMIC_WORKAROUND_CVE_2018_3639
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100220 apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
Bipin Raviee56a8a2022-02-08 19:32:38 -0600221#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639*/
222
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100223 b serror_aarch32
Roberto Vargas95f30ab2018-04-17 11:31:43 +0100224end_vector_entry cortex_a76_serror_aarch32
Bipin Raviee56a8a2022-02-08 19:32:38 -0600225#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */
Bipin Ravif267b372022-02-02 23:03:28 -0600226
Bipin Raviee56a8a2022-02-08 19:32:38 -0600227#if DYNAMIC_WORKAROUND_CVE_2018_3639
Bipin Ravif267b372022-02-02 23:03:28 -0600228 /*
229 * -----------------------------------------------------------------
230 * This function applies the mitigation for CVE-2018-3639
231 * specifically for sync exceptions. It implements a fast path
232 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
233 * running in AArch64 will go through the fast and return early.
234 *
235 * In the fast path x0-x3 registers do not need to be restored as the
236 * calling context will have saved them.
237 *
238 * Caller must pass value of esr_el3 to compare via x2.
239 * Save and restore these registers outside of this function from the
240 * context before jumping to the main runtime vector table entry.
241 *
242 * Shall clobber: x0-x3, x30
243 * -----------------------------------------------------------------
244 */
245func apply_cve_2018_3639_sync_wa
246 /*
247 * Ensure SMC is coming from A64/A32 state on #0
248 * with W0 = SMCCC_ARCH_WORKAROUND_2
249 *
250 * This sequence evaluates as:
251 * (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
252 * allowing use of a single branch operation
253 * X2 populated outside this function with the SMC FID.
254 */
255 orr w3, wzr, #SMCCC_ARCH_WORKAROUND_2
256 cmp x0, x3
257 mrs x3, esr_el3
258
259 ccmp w2, w3, #0, eq
260 /*
261 * Static predictor will predict a fall-through, optimizing
262 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
263 */
264 bne 1f
265
266 /*
267 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
268 * fast path.
269 */
270 cmp x1, xzr /* enable/disable check */
271
272 /*
273 * When the calling context wants mitigation disabled,
274 * we program the mitigation disable function in the
275 * CPU context, which gets invoked on subsequent exits from
276 * EL3 via the `el3_exit` function. Otherwise NULL is
277 * programmed in the CPU context, which results in caller's
278 * inheriting the EL3 mitigation state (enabled) on subsequent
279 * `el3_exit`.
280 */
281 mov x0, xzr
282 adr x1, cortex_a76_disable_wa_cve_2018_3639
283 csel x1, x1, x0, eq
284 str x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
285
286 mrs x2, CORTEX_A76_CPUACTLR2_EL1
287 orr x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
288 bic x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
289 csel x3, x3, x1, eq
290 msr CORTEX_A76_CPUACTLR2_EL1, x3
291 ldp x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
292 /*
293 * `SMCCC_ARCH_WORKAROUND_2`fast path return to lower EL.
294 */
295 exception_return /* exception_return contains ISB */
2961:
297 ret
298endfunc apply_cve_2018_3639_sync_wa
Ambroise Vincent8c6fdf82019-03-07 14:33:02 +0000299#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100300
Louis Mayencourt09924472019-02-21 17:35:07 +0000301 /* --------------------------------------------------
Louis Mayencourt59fa2182019-02-25 15:17:44 +0000302 * Errata Workaround for Cortex A76 Errata #1073348.
303 * This applies only to revision <= r1p0 of Cortex A76.
304 * Inputs:
305 * x0: variant[4:7] and revision[0:3] of current cpu.
306 * Shall clobber: x0-x17
307 * --------------------------------------------------
308 */
309func errata_a76_1073348_wa
310 /*
311 * Compare x0 against revision r1p0
312 */
313 mov x17, x30
314 bl check_errata_1073348
315 cbz x0, 1f
316 mrs x1, CORTEX_A76_CPUACTLR_EL1
317 orr x1, x1 ,#CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
318 msr CORTEX_A76_CPUACTLR_EL1, x1
319 isb
3201:
321 ret x17
Soby Mathew1d3ba1c2019-05-01 09:43:18 +0100322endfunc errata_a76_1073348_wa
Louis Mayencourt59fa2182019-02-25 15:17:44 +0000323
324func check_errata_1073348
325 mov x1, #0x10
326 b cpu_rev_var_ls
327endfunc check_errata_1073348
328
329 /* --------------------------------------------------
Louis Mayencourt09924472019-02-21 17:35:07 +0000330 * Errata Workaround for Cortex A76 Errata #1130799.
331 * This applies only to revision <= r2p0 of Cortex A76.
332 * Inputs:
333 * x0: variant[4:7] and revision[0:3] of current cpu.
334 * Shall clobber: x0-x17
335 * --------------------------------------------------
336 */
337func errata_a76_1130799_wa
338 /*
339 * Compare x0 against revision r2p0
340 */
341 mov x17, x30
342 bl check_errata_1130799
343 cbz x0, 1f
344 mrs x1, CORTEX_A76_CPUACTLR2_EL1
345 orr x1, x1 ,#(1 << 59)
346 msr CORTEX_A76_CPUACTLR2_EL1, x1
347 isb
3481:
349 ret x17
350endfunc errata_a76_1130799_wa
351
352func check_errata_1130799
353 mov x1, #0x20
354 b cpu_rev_var_ls
355endfunc check_errata_1130799
356
Louis Mayencourtadda9d42019-02-25 11:37:38 +0000357 /* --------------------------------------------------
358 * Errata Workaround for Cortex A76 Errata #1220197.
359 * This applies only to revision <= r2p0 of Cortex A76.
360 * Inputs:
361 * x0: variant[4:7] and revision[0:3] of current cpu.
362 * Shall clobber: x0-x17
363 * --------------------------------------------------
364 */
365func errata_a76_1220197_wa
366/*
367 * Compare x0 against revision r2p0
368 */
369 mov x17, x30
370 bl check_errata_1220197
371 cbz x0, 1f
372 mrs x1, CORTEX_A76_CPUECTLR_EL1
373 orr x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
374 msr CORTEX_A76_CPUECTLR_EL1, x1
375 isb
3761:
377 ret x17
378endfunc errata_a76_1220197_wa
379
380func check_errata_1220197
381 mov x1, #0x20
382 b cpu_rev_var_ls
383endfunc check_errata_1220197
384
Soby Mathew1d3ba1c2019-05-01 09:43:18 +0100385 /* --------------------------------------------------
386 * Errata Workaround for Cortex A76 Errata #1257314.
387 * This applies only to revision <= r3p0 of Cortex A76.
388 * Inputs:
389 * x0: variant[4:7] and revision[0:3] of current cpu.
390 * Shall clobber: x0-x17
391 * --------------------------------------------------
392 */
393func errata_a76_1257314_wa
394 /*
395 * Compare x0 against revision r3p0
396 */
397 mov x17, x30
398 bl check_errata_1257314
399 cbz x0, 1f
400 mrs x1, CORTEX_A76_CPUACTLR3_EL1
401 orr x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
402 msr CORTEX_A76_CPUACTLR3_EL1, x1
403 isb
4041:
405 ret x17
406endfunc errata_a76_1257314_wa
407
408func check_errata_1257314
409 mov x1, #0x30
410 b cpu_rev_var_ls
411endfunc check_errata_1257314
412
413 /* --------------------------------------------------
414 * Errata Workaround for Cortex A76 Errata #1262888.
415 * This applies only to revision <= r3p0 of Cortex A76.
416 * Inputs:
417 * x0: variant[4:7] and revision[0:3] of current cpu.
418 * Shall clobber: x0-x17
419 * --------------------------------------------------
420 */
421func errata_a76_1262888_wa
422 /*
423 * Compare x0 against revision r3p0
424 */
425 mov x17, x30
426 bl check_errata_1262888
427 cbz x0, 1f
428 mrs x1, CORTEX_A76_CPUECTLR_EL1
429 orr x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
430 msr CORTEX_A76_CPUECTLR_EL1, x1
431 isb
4321:
433 ret x17
434endfunc errata_a76_1262888_wa
435
436func check_errata_1262888
437 mov x1, #0x30
438 b cpu_rev_var_ls
439endfunc check_errata_1262888
440
Soby Mathew16d006b2019-05-03 13:17:56 +0100441 /* ---------------------------------------------------
442 * Errata Workaround for Cortex A76 Errata #1286807.
443 * This applies only to revision <= r3p0 of Cortex A76.
444 * Due to the nature of the errata it is applied unconditionally
445 * when built in, report it as applicable in this case
446 * ---------------------------------------------------
447 */
448func check_errata_1286807
449#if ERRATA_A76_1286807
450 mov x0, #ERRATA_APPLIES
451 ret
452#else
453 mov x1, #0x30
454 b cpu_rev_var_ls
455#endif
456endfunc check_errata_1286807
Soby Mathew1d3ba1c2019-05-01 09:43:18 +0100457
johpow019603f982020-05-29 14:17:38 -0500458 /* --------------------------------------------------
459 * Errata workaround for Cortex A76 Errata #1791580.
460 * This applies to revisions <= r4p0 of Cortex A76.
461 * Inputs:
462 * x0: variant[4:7] and revision[0:3] of current cpu.
463 * Shall clobber: x0-x17
464 * --------------------------------------------------
465 */
466func errata_a76_1791580_wa
467 /* Compare x0 against revision r4p0 */
468 mov x17, x30
469 bl check_errata_1791580
470 cbz x0, 1f
471 mrs x1, CORTEX_A76_CPUACTLR2_EL1
472 orr x1, x1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
473 msr CORTEX_A76_CPUACTLR2_EL1, x1
474 isb
4751:
476 ret x17
477endfunc errata_a76_1791580_wa
478
479func check_errata_1791580
480 /* Applies to everything <=r4p0. */
481 mov x1, #0x40
482 b cpu_rev_var_ls
483endfunc check_errata_1791580
484
johpow015c9ed082020-06-02 15:02:28 -0500485 /* --------------------------------------------------
johpow0181365e32020-09-29 17:19:09 -0500486 * Errata Workaround for Cortex A76 Errata #1262606,
487 * #1275112, and #1868343. #1262606 and #1275112
488 * apply to revisions <= r3p0 and #1868343 applies to
489 * revisions <= r4p0.
490 * Inputs:
491 * x0: variant[4:7] and revision[0:3] of current cpu.
492 * Shall clobber: x0-x17
493 * --------------------------------------------------
494 */
495
496func errata_a76_1262606_1275112_1868343_wa
497 mov x17, x30
498
499/* Check for <= r3p0 cases and branch if check passes. */
500#if ERRATA_A76_1262606 || ERRATA_A76_1275112
501 bl check_errata_1262606
502 cbnz x0, 1f
503#endif
504
505/* Check for <= r4p0 cases and branch if check fails. */
506#if ERRATA_A76_1868343
507 bl check_errata_1868343
508 cbz x0, 2f
509#endif
5101:
511 mrs x1, CORTEX_A76_CPUACTLR_EL1
512 orr x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
513 msr CORTEX_A76_CPUACTLR_EL1, x1
514 isb
5152:
516 ret x17
517endfunc errata_a76_1262606_1275112_1868343_wa
518
519func check_errata_1262606
520 mov x1, #0x30
521 b cpu_rev_var_ls
522endfunc check_errata_1262606
523
524func check_errata_1275112
525 mov x1, #0x30
526 b cpu_rev_var_ls
527endfunc check_errata_1275112
528
529func check_errata_1868343
530 mov x1, #0x40
531 b cpu_rev_var_ls
532endfunc check_errata_1868343
533
johpow013e34e922020-12-15 19:02:18 -0600534/* --------------------------------------------------
535 * Errata Workaround for A76 Erratum 1946160.
536 * This applies to revisions r3p0 - r4p1 of A76.
537 * It also exists in r0p0 - r2p0 but there is no fix
538 * in those revisions.
539 * Inputs:
540 * x0: variant[4:7] and revision[0:3] of current cpu.
541 * Shall clobber: x0-x17
542 * --------------------------------------------------
543 */
544func errata_a76_1946160_wa
545 /* Compare x0 against revisions r3p0 - r4p1 */
546 mov x17, x30
547 bl check_errata_1946160
548 cbz x0, 1f
549
550 mov x0, #3
551 msr S3_6_C15_C8_0, x0
552 ldr x0, =0x10E3900002
553 msr S3_6_C15_C8_2, x0
554 ldr x0, =0x10FFF00083
555 msr S3_6_C15_C8_3, x0
556 ldr x0, =0x2001003FF
557 msr S3_6_C15_C8_1, x0
558
559 mov x0, #4
560 msr S3_6_C15_C8_0, x0
561 ldr x0, =0x10E3800082
562 msr S3_6_C15_C8_2, x0
563 ldr x0, =0x10FFF00083
564 msr S3_6_C15_C8_3, x0
565 ldr x0, =0x2001003FF
566 msr S3_6_C15_C8_1, x0
567
568 mov x0, #5
569 msr S3_6_C15_C8_0, x0
570 ldr x0, =0x10E3800200
571 msr S3_6_C15_C8_2, x0
572 ldr x0, =0x10FFF003E0
573 msr S3_6_C15_C8_3, x0
574 ldr x0, =0x2001003FF
575 msr S3_6_C15_C8_1, x0
576
577 isb
5781:
579 ret x17
580endfunc errata_a76_1946160_wa
581
582func check_errata_1946160
583 /* Applies to revisions r3p0 - r4p1. */
584 mov x1, #0x30
585 mov x2, #0x41
586 b cpu_rev_var_range
587endfunc check_errata_1946160
588
Bipin Ravi23e29e42022-11-02 16:50:03 -0500589 /* ----------------------------------------------------
590 * Errata Workaround for Cortex-A76 Errata #2743102
591 * This applies to revisions <= r4p1 and is still open.
592 * x0: variant[4:7] and revision[0:3] of current cpu.
593 * Shall clobber: x0-x17
594 * ----------------------------------------------------
595 */
596func errata_a76_2743102_wa
597 mov x17, x30
598 bl check_errata_2743102
599 cbz x0, 1f
600
601 /* dsb before isb of power down sequence */
602 dsb sy
6031:
604 ret x17
605endfunc errata_a76_2743102_wa
606
607func check_errata_2743102
608 /* Applies to all revisions <= r4p1 */
609 mov x1, #0x41
610 b cpu_rev_var_ls
611endfunc check_errata_2743102
612
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100613func check_errata_cve_2018_3639
614#if WORKAROUND_CVE_2018_3639
615 mov x0, #ERRATA_APPLIES
616#else
617 mov x0, #ERRATA_MISSING
618#endif
619 ret
620endfunc check_errata_cve_2018_3639
621
622func cortex_a76_disable_wa_cve_2018_3639
623 mrs x0, CORTEX_A76_CPUACTLR2_EL1
624 bic x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
625 msr CORTEX_A76_CPUACTLR2_EL1, x0
626 isb
627 ret
628endfunc cortex_a76_disable_wa_cve_2018_3639
629
Manish V Badarkhe7672edf2020-08-03 18:43:14 +0100630 /* --------------------------------------------------------------
631 * Errata Workaround for Cortex A76 Errata #1165522.
632 * This applies only to revisions <= r3p0 of Cortex A76.
633 * Due to the nature of the errata it is applied unconditionally
634 * when built in, report it as applicable in this case
635 * --------------------------------------------------------------
636 */
637func check_errata_1165522
638#if ERRATA_A76_1165522
639 mov x0, #ERRATA_APPLIES
640 ret
641#else
642 mov x1, #0x30
643 b cpu_rev_var_ls
644#endif
645endfunc check_errata_1165522
646
Bipin Raviee56a8a2022-02-08 19:32:38 -0600647func check_errata_cve_2022_23960
648#if WORKAROUND_CVE_2022_23960
649 mov x0, #ERRATA_APPLIES
650#else
651 mov x0, #ERRATA_MISSING
652#endif /* WORKAROUND_CVE_2022_23960 */
653 ret
654endfunc check_errata_cve_2022_23960
655
Louis Mayencourt09924472019-02-21 17:35:07 +0000656 /* -------------------------------------------------
657 * The CPU Ops reset function for Cortex-A76.
658 * Shall clobber: x0-x19
659 * -------------------------------------------------
660 */
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100661func cortex_a76_reset_func
John Tsichritzis4daa1de2018-07-23 09:11:59 +0100662 mov x19, x30
Louis Mayencourt09924472019-02-21 17:35:07 +0000663 bl cpu_get_rev_var
Louis Mayencourtadda9d42019-02-25 11:37:38 +0000664 mov x18, x0
Louis Mayencourt09924472019-02-21 17:35:07 +0000665
Louis Mayencourt59fa2182019-02-25 15:17:44 +0000666#if ERRATA_A76_1073348
667 mov x0, x18
668 bl errata_a76_1073348_wa
669#endif
670
Louis Mayencourt09924472019-02-21 17:35:07 +0000671#if ERRATA_A76_1130799
Louis Mayencourtadda9d42019-02-25 11:37:38 +0000672 mov x0, x18
Louis Mayencourt09924472019-02-21 17:35:07 +0000673 bl errata_a76_1130799_wa
674#endif
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +0000675
Louis Mayencourtadda9d42019-02-25 11:37:38 +0000676#if ERRATA_A76_1220197
677 mov x0, x18
678 bl errata_a76_1220197_wa
679#endif
680
Soby Mathew1d3ba1c2019-05-01 09:43:18 +0100681#if ERRATA_A76_1257314
682 mov x0, x18
683 bl errata_a76_1257314_wa
684#endif
685
johpow0181365e32020-09-29 17:19:09 -0500686#if ERRATA_A76_1262606 || ERRATA_A76_1275112 || ERRATA_A76_1868343
Soby Mathew1d3ba1c2019-05-01 09:43:18 +0100687 mov x0, x18
johpow0181365e32020-09-29 17:19:09 -0500688 bl errata_a76_1262606_1275112_1868343_wa
Soby Mathew1d3ba1c2019-05-01 09:43:18 +0100689#endif
690
691#if ERRATA_A76_1262888
692 mov x0, x18
693 bl errata_a76_1262888_wa
694#endif
695
johpow019603f982020-05-29 14:17:38 -0500696#if ERRATA_A76_1791580
697 mov x0, x18
698 bl errata_a76_1791580_wa
699#endif
700
johpow013e34e922020-12-15 19:02:18 -0600701#if ERRATA_A76_1946160
702 mov x0, x18
703 bl errata_a76_1946160_wa
704#endif
705
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100706#if WORKAROUND_CVE_2018_3639
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +0000707 /* If the PE implements SSBS, we don't need the dynamic workaround */
708 mrs x0, id_aa64pfr1_el1
709 lsr x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
Ambroise Vincent6dbbe432019-03-07 14:31:33 +0000710 and x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
Ambroise Vincent8c6fdf82019-03-07 14:33:02 +0000711#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
712 cmp x0, 0
713 ASM_ASSERT(ne)
714#endif
715#if DYNAMIC_WORKAROUND_CVE_2018_3639
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +0000716 cbnz x0, 1f
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100717 mrs x0, CORTEX_A76_CPUACTLR2_EL1
718 orr x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
719 msr CORTEX_A76_CPUACTLR2_EL1, x0
720 isb
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100721
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +0000722#ifdef IMAGE_BL31
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100723 /*
724 * The Cortex-A76 generic vectors are overwritten to use the vectors
Ambroise Vincent6dbbe432019-03-07 14:31:33 +0000725 * defined above. This is required in order to apply mitigation
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100726 * against CVE-2018-3639 on exception entry from lower ELs.
Bipin Raviee56a8a2022-02-08 19:32:38 -0600727 * If the below vector table is used, skip overriding it again for
728 * CVE_2022_23960 as both use the same vbar.
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100729 */
Bipin Raviee56a8a2022-02-08 19:32:38 -0600730 adr x0, cortex_a76_wa_cve_vbar
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100731 msr vbar_el3, x0
732 isb
Bipin Raviee56a8a2022-02-08 19:32:38 -0600733 b 2f
Ambroise Vincent8c6fdf82019-03-07 14:33:02 +0000734#endif /* IMAGE_BL31 */
John Tsichritzis4daa1de2018-07-23 09:11:59 +0100735
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +00007361:
Ambroise Vincent8c6fdf82019-03-07 14:33:02 +0000737#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
738#endif /* WORKAROUND_CVE_2018_3639 */
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +0000739
Bipin Raviee56a8a2022-02-08 19:32:38 -0600740#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
741 /*
742 * The Cortex-A76 generic vectors are overridden to apply errata
743 * mitigation on exception entry from lower ELs. This will be bypassed
744 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
745 */
746 adr x0, cortex_a76_wa_cve_vbar
747 msr vbar_el3, x0
748 isb
749#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
7502:
751
Louis Mayencourt4498b152019-04-09 16:29:01 +0100752#if ERRATA_DSU_798953
753 bl errata_dsu_798953_wa
754#endif
755
John Tsichritzis4daa1de2018-07-23 09:11:59 +0100756#if ERRATA_DSU_936184
757 bl errata_dsu_936184_wa
758#endif
Louis Mayencourt4498b152019-04-09 16:29:01 +0100759
John Tsichritzis4daa1de2018-07-23 09:11:59 +0100760 ret x19
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100761endfunc cortex_a76_reset_func
762
Isla Mitchellea84d6b2017-08-03 16:04:46 +0100763 /* ---------------------------------------------
764 * HW will do the cache maintenance while powering down
765 * ---------------------------------------------
766 */
767func cortex_a76_core_pwr_dwn
768 /* ---------------------------------------------
769 * Enable CPU power down bit in power control register
770 * ---------------------------------------------
771 */
772 mrs x0, CORTEX_A76_CPUPWRCTLR_EL1
773 orr x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
774 msr CORTEX_A76_CPUPWRCTLR_EL1, x0
Bipin Ravi23e29e42022-11-02 16:50:03 -0500775#if ERRATA_A76_2743102
776 mov x15, x30
777 bl cpu_get_rev_var
778 bl errata_a76_2743102_wa
779 mov x30, x15
780#endif /* ERRATA_A76_2743102 */
Isla Mitchellea84d6b2017-08-03 16:04:46 +0100781 isb
782 ret
783endfunc cortex_a76_core_pwr_dwn
784
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100785#if REPORT_ERRATA
786/*
Louis Mayencourt4498b152019-04-09 16:29:01 +0100787 * Errata printing function for Cortex A76. Must follow AAPCS.
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100788 */
789func cortex_a76_errata_report
790 stp x8, x30, [sp, #-16]!
791
792 bl cpu_get_rev_var
793 mov x8, x0
794
795 /*
796 * Report all errata. The revision-variant information is passed to
797 * checking functions of each errata.
798 */
Louis Mayencourt59fa2182019-02-25 15:17:44 +0000799 report_errata ERRATA_A76_1073348, cortex_a76, 1073348
Louis Mayencourt09924472019-02-21 17:35:07 +0000800 report_errata ERRATA_A76_1130799, cortex_a76, 1130799
Bipin Ravi23e29e42022-11-02 16:50:03 -0500801 report_errata ERRATA_A76_1165522, cortex_a76, 1165522
Louis Mayencourtadda9d42019-02-25 11:37:38 +0000802 report_errata ERRATA_A76_1220197, cortex_a76, 1220197
Soby Mathew1d3ba1c2019-05-01 09:43:18 +0100803 report_errata ERRATA_A76_1257314, cortex_a76, 1257314
804 report_errata ERRATA_A76_1262606, cortex_a76, 1262606
805 report_errata ERRATA_A76_1262888, cortex_a76, 1262888
806 report_errata ERRATA_A76_1275112, cortex_a76, 1275112
Soby Mathew16d006b2019-05-03 13:17:56 +0100807 report_errata ERRATA_A76_1286807, cortex_a76, 1286807
johpow019603f982020-05-29 14:17:38 -0500808 report_errata ERRATA_A76_1791580, cortex_a76, 1791580
johpow0181365e32020-09-29 17:19:09 -0500809 report_errata ERRATA_A76_1868343, cortex_a76, 1868343
johpow013e34e922020-12-15 19:02:18 -0600810 report_errata ERRATA_A76_1946160, cortex_a76, 1946160
Bipin Ravi23e29e42022-11-02 16:50:03 -0500811 report_errata ERRATA_A76_2743102, cortex_a76, 2743102
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100812 report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
Louis Mayencourt4498b152019-04-09 16:29:01 +0100813 report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
John Tsichritzis4daa1de2018-07-23 09:11:59 +0100814 report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184
Bipin Raviee56a8a2022-02-08 19:32:38 -0600815 report_errata WORKAROUND_CVE_2022_23960, cortex_a76, cve_2022_23960
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100816
817 ldp x8, x30, [sp], #16
818 ret
819endfunc cortex_a76_errata_report
820#endif
821
Isla Mitchellea84d6b2017-08-03 16:04:46 +0100822 /* ---------------------------------------------
823 * This function provides cortex_a76 specific
824 * register information for crash reporting.
825 * It needs to return with x6 pointing to
826 * a list of register names in ascii and
827 * x8 - x15 having values of registers to be
828 * reported.
829 * ---------------------------------------------
830 */
831.section .rodata.cortex_a76_regs, "aS"
832cortex_a76_regs: /* The ascii list of register names to be reported */
833 .asciz "cpuectlr_el1", ""
834
835func cortex_a76_cpu_reg_dump
836 adr x6, cortex_a76_regs
837 mrs x8, CORTEX_A76_CPUECTLR_EL1
838 ret
839endfunc cortex_a76_cpu_reg_dump
840
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100841declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
842 cortex_a76_reset_func, \
843 CPU_NO_EXTRA1_FUNC, \
844 cortex_a76_disable_wa_cve_2018_3639, \
Bipin Ravicaa2e052022-02-23 23:45:50 -0600845 CPU_NO_EXTRA3_FUNC, \
Isla Mitchellea84d6b2017-08-03 16:04:46 +0100846 cortex_a76_core_pwr_dwn