/*
 * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>
#include "wa_cve_2022_23960_bhb.S"

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them. The macro also saves
	 * x29-x30 to the context in the sync_exception path.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.if \_is_sync_exception
	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	mov_imm	w2, \_esr_el3_val
	bl	apply_cve_2018_3639_sync_wa
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	.endif
	/*
	 * Always enable the Spectre Variant 4 (CVE-2018-3639) mitigation
	 * during EL3 execution. This is not required for the fast path
	 * above because it does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960
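/*
 * Vector table shared by the dynamic CVE-2018-3639 workaround and the
 * CVE-2022-23960 (Spectre-BHB) workaround. Only the lower-EL entries apply
 * mitigations; each one then falls through to the corresponding generic
 * runtime vector.
 */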
vector_base cortex_a76_wa_cve_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64

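	/*
	 * Spectre-BHB (CVE-2022-23960): clear the branch history before any
	 * branch in EL3 can be influenced by lower-EL training, using the
	 * loop workaround from wa_cve_2022_23960_bhb.S.
	 */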
#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32

#if WORKAROUND_CVE_2022_23960
	apply_cve_2022_23960_bhb_wa CORTEX_A76_BHB_LOOP_COUNT
#endif /* WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 || WORKAROUND_CVE_2022_23960 */

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * -----------------------------------------------------------------
	 * This function applies the mitigation for CVE-2018-3639
	 * specifically for sync exceptions. It implements a fast path
	 * where `SMCCC_ARCH_WORKAROUND_2` SMC calls from a lower EL
	 * running in AArch64 will go through the fast path and return early.
	 *
	 * In the fast path x0-x3 registers do not need to be restored as the
	 * calling context will have saved them.
	 *
	 * Caller must pass the value of esr_el3 to compare against via x2.
	 * Save and restore these registers outside of this function from the
	 * context before jumping to the main runtime vector table entry.
	 *
	 * Shall clobber: x0-x3, x30
	 * -----------------------------------------------------------------
	 */
func apply_cve_2018_3639_sync_wa
	/*
	 * Ensure the SMC came from AArch64/AArch32 state with immediate #0
	 * and W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *	(W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 * X2 is populated outside this function with the expected ESR_EL3
	 * value.
	 */
	orr	w3, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x3
	mrs	x3, esr_el3

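	/*
	 * ccmp: if the preceding cmp set EQ, the flags reflect (w2 == w3);
	 * otherwise they are set to the immediate #0, i.e. NE.
	 */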
	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

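	/*
	 * Select the new CPUACTLR2_EL1 value accordingly: clear the
	 * DISABLE_LOAD_PASS_STORE bit when the caller requested the
	 * mitigation disabled (EQ), set it otherwise.
	 */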
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
	/*
	 * `SMCCC_ARCH_WORKAROUND_2` fast path return to lower EL.
	 */
	exception_return /* exception_return contains ISB */
1:
	ret
endfunc apply_cve_2018_3639_sync_wa
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1073348.
	 * This applies only to revision <= r1p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30
	bl	check_errata_1073348
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1073348_wa

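	/*
	 * cpu_rev_var_ls takes the revision-variant in x0 and an upper bound
	 * in x1 and returns ERRATA_APPLIES in x0 when x0 <= x1; 0x10 encodes
	 * r1p0 (variant 1, revision 0). The check functions below follow the
	 * same convention.
	 */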
func check_errata_1073348
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1130799.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1130799
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, #(1 << 59)
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa

func check_errata_1130799
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1220197.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30
	bl	check_errata_1220197
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa

func check_errata_1220197
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1257314.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1257314_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1257314
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1257314_wa

func check_errata_1257314
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1257314

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262888.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1262888_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30
	bl	check_errata_1262888
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1262888_wa

func check_errata_1262888
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262888

	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1286807.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Due to the nature of the errata it is applied
	 * unconditionally when built in; report it as
	 * applicable in this case.
	 * ---------------------------------------------------
	 */
func check_errata_1286807
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1286807

	/* --------------------------------------------------
	 * Errata workaround for Cortex A76 Errata #1791580.
	 * This applies to revisions <= r4p0 of Cortex A76.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1791580_wa
	/* Compare x0 against revision r4p0 */
	mov	x17, x30
	bl	check_errata_1791580
	cbz	x0, 1f
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1791580_wa

func check_errata_1791580
	/* Applies to everything <= r4p0. */
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1791580

	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262606,
	 * #1275112, and #1868343. #1262606 and #1275112
	 * apply to revisions <= r3p0 and #1868343 applies to
	 * revisions <= r4p0.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */

func errata_a76_1262606_1275112_1868343_wa
	mov	x17, x30

	/* Check for <= r3p0 cases and branch if check passes. */
#if ERRATA_A76_1262606 || ERRATA_A76_1275112
	bl	check_errata_1262606
	cbnz	x0, 1f
#endif

	/* Check for <= r4p0 cases and branch if check fails. */
#if ERRATA_A76_1868343
	bl	check_errata_1868343
	cbz	x0, 2f
#endif
1:
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
2:
	ret	x17
endfunc errata_a76_1262606_1275112_1868343_wa

func check_errata_1262606
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1262606

func check_errata_1275112
	mov	x1, #0x30
	b	cpu_rev_var_ls
endfunc check_errata_1275112

func check_errata_1868343
	mov	x1, #0x40
	b	cpu_rev_var_ls
endfunc check_errata_1868343

	/* --------------------------------------------------
	 * Errata Workaround for A76 Erratum 1946160.
	 * This applies to revisions r3p0 - r4p1 of A76.
	 * It also exists in r0p0 - r2p0 but there is no fix
	 * in those revisions.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1946160_wa
	/* Compare x0 against revisions r3p0 - r4p1 */
	mov	x17, x30
	bl	check_errata_1946160
	cbz	x0, 1f

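	/*
	 * The writes below program IMPLEMENTATION DEFINED registers
	 * (S3_6_C15_C8_n). The values are taken verbatim from the published
	 * workaround for erratum 1946160; their meaning is not
	 * architecturally documented (assumed to be a vendor-specified
	 * patching sequence).
	 */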
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	isb
1:
	ret	x17
endfunc errata_a76_1946160_wa

func check_errata_1946160
	/* Applies to revisions r3p0 - r4p1. */
	mov	x1, #0x30
	mov	x2, #0x41
	b	cpu_rev_var_range
endfunc check_errata_1946160

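	/*
	 * CVE-2018-3639 is reported as mitigated whenever the workaround is
	 * built in; whether SSBS or the dynamic CPUACTLR2_EL1 control is
	 * used is decided in the reset function below.
	 */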
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

	/* --------------------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1165522.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Due to the nature of the errata it is applied unconditionally
	 * when built in; report it as applicable in this case.
	 * --------------------------------------------------------------
	 */
func check_errata_1165522
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1165522

func check_errata_cve_2022_23960
#if WORKAROUND_CVE_2022_23960
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif /* WORKAROUND_CVE_2022_23960 */
	ret
endfunc check_errata_cve_2022_23960

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30
	bl	cpu_get_rev_var
	mov	x18, x0

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
	mov	x0, x18
	bl	errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112 || ERRATA_A76_1868343
	mov	x0, x18
	bl	errata_a76_1262606_1275112_1868343_wa
#endif

#if ERRATA_A76_1262888
	mov	x0, x18
	bl	errata_a76_1262888_wa
#endif

#if ERRATA_A76_1791580
	mov	x0, x18
	bl	errata_a76_1791580_wa
#endif

#if ERRATA_A76_1946160
	mov	x0, x18
	bl	errata_a76_1946160_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
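	/*
	 * x0 now holds the ID_AA64PFR1_EL1.SSBS field; non-zero means
	 * PSTATE.SSBS is implemented and the dynamic CPUACTLR2_EL1
	 * workaround is unnecessary.
	 */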
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply the mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 * If the vector table below is used, skip overriding it again for
	 * CVE-2022-23960 as both use the same vbar.
	 */
	adr	x0, cortex_a76_wa_cve_vbar
	msr	vbar_el3, x0
	isb
	b	2f
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31 && WORKAROUND_CVE_2022_23960
	/*
	 * The Cortex-A76 generic vectors are overridden to apply errata
	 * mitigation on exception entry from lower ELs. This will be bypassed
	 * if DYNAMIC_WORKAROUND_CVE_2018_3639 has overridden the vectors.
	 */
	adr	x0, cortex_a76_wa_cve_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 && WORKAROUND_CVE_2022_23960 */
2:

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking functions of each errata.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata ERRATA_A76_1257314, cortex_a76, 1257314
	report_errata ERRATA_A76_1262606, cortex_a76, 1262606
	report_errata ERRATA_A76_1262888, cortex_a76, 1262888
	report_errata ERRATA_A76_1275112, cortex_a76, 1275112
	report_errata ERRATA_A76_1286807, cortex_a76, 1286807
	report_errata ERRATA_A76_1791580, cortex_a76, 1791580
	report_errata ERRATA_A76_1165522, cortex_a76, 1165522
	report_errata ERRATA_A76_1868343, cortex_a76, 1868343
	report_errata ERRATA_A76_1946160, cortex_a76, 1946160
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184
	report_errata WORKAROUND_CVE_2022_23960, cortex_a76, cve_2022_23960

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""
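	/* The trailing empty string terminates the register name list. */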

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

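	/*
	 * Register the CPU ops. In declare_cpu_ops_wa the extra function
	 * slots carry the CVE-2017-5715, CVE-2018-3639 and CVE-2022-23960
	 * workaround hooks respectively (see cpu_macros.S); only the
	 * CVE-2018-3639 disable hook is provided here.
	 */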
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	CPU_NO_EXTRA3_FUNC, \
	cortex_a76_core_pwr_dwn