/*
 * Copyright (c) 2017-2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Cortex-A76 must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Cortex-A76 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

/* ESR_EL3 values for an SMC #0 executed from AArch64 / AArch32 lower ELs */
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639
	 * (Speculative Store Bypass).
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them.
	 *
	 * _is_sync_exception: non-zero for the sync-exception vector
	 *                     entries, where the SMC fast path applies.
	 * _esr_el3_val:       expected ESR_EL3 value for SMC #0 from the
	 *                     originating state (A64 or A32).
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
	/*
	 * Ensure SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2
	 *
	 * This sequence evaluates as:
	 * (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation
	 */
	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x2
	mrs	x3, esr_el3
	mov_imm	w2, \_esr_el3_val
	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check: x1 == 0 requests disable */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in caller's
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

	/* Apply or clear DISABLE_LOAD_PASS_STORE per the request in x1 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	exception_return /* exception_return contains ISB */
	.endif
1:
	/*
	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm
108
/*
 * Replacement EL3 vector table installed by cortex_a76_reset_func when the
 * dynamic CVE-2018-3639 workaround is active. Lower-EL entries apply the
 * mitigation macro before tailing into the generic runtime handlers.
 */
vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * (mitigation applied on every entry from a lower EL)
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100199
	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1073348.
	 * This applies only to revision <= r1p0 of Cortex A76.
	 * Sets CPUACTLR_EL1 "disable static prediction" bit.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1073348_wa
	/*
	 * Compare x0 against revision r1p0
	 */
	mov	x17, x30		/* preserve LR across the bl below */
	bl	check_errata_1073348
	cbz	x0, 1f			/* skip if erratum not applicable */
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1 ,#CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb				/* ensure sysreg write takes effect */
1:
	ret	x17
endfunc errata_a76_1073348_wa
Louis Mayencourt59fa2182019-02-25 15:17:44 +0000222
/*
 * Report applicability of erratum 1073348 (revisions <= r1p0).
 * In:  x0 = variant/revision of current cpu. Out: x0 = errata status.
 */
func check_errata_1073348
	mov	x1, #0x10		/* r1p0 upper bound */
	b	cpu_rev_var_ls
endfunc check_errata_1073348
227
	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1130799.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Sets bit 59 of CPUACTLR2_EL1.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1130799_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30		/* preserve LR across the bl below */
	bl	check_errata_1130799
	cbz	x0, 1f			/* skip if erratum not applicable */
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1 ,#(1 << 59)	/* implementation-defined chicken bit */
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1130799_wa
250
/*
 * Report applicability of erratum 1130799 (revisions <= r2p0).
 * In:  x0 = variant/revision of current cpu. Out: x0 = errata status.
 */
func check_errata_1130799
	mov	x1, #0x20		/* r2p0 upper bound */
	b	cpu_rev_var_ls
endfunc check_errata_1130799
255
	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1220197.
	 * This applies only to revision <= r2p0 of Cortex A76.
	 * Sets the CPUECTLR_EL1 write-streaming threshold bits
	 * (WS_THR_L2).
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1220197_wa
	/*
	 * Compare x0 against revision r2p0
	 */
	mov	x17, x30		/* preserve LR across the bl below */
	bl	check_errata_1220197
	cbz	x0, 1f			/* skip if erratum not applicable */
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1220197_wa
278
/*
 * Report applicability of erratum 1220197 (revisions <= r2p0).
 * In:  x0 = variant/revision of current cpu. Out: x0 = errata status.
 */
func check_errata_1220197
	mov	x1, #0x20		/* r2p0 upper bound */
	b	cpu_rev_var_ls
endfunc check_errata_1220197
283
	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1257314.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Sets bit 10 of CPUACTLR3_EL1.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1257314_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30		/* preserve LR across the bl below */
	bl	check_errata_1257314
	cbz	x0, 1f			/* skip if erratum not applicable */
	mrs	x1, CORTEX_A76_CPUACTLR3_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR3_EL1_BIT_10
	msr	CORTEX_A76_CPUACTLR3_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1257314_wa
306
/*
 * Report applicability of erratum 1257314 (revisions <= r3p0).
 * In:  x0 = variant/revision of current cpu. Out: x0 = errata status.
 */
func check_errata_1257314
	mov	x1, #0x30		/* r3p0 upper bound */
	b	cpu_rev_var_ls
endfunc check_errata_1257314
311
	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262888.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Sets bit 51 of CPUECTLR_EL1.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1262888_wa
	/*
	 * Compare x0 against revision r3p0
	 */
	mov	x17, x30		/* preserve LR across the bl below */
	bl	check_errata_1262888
	cbz	x0, 1f			/* skip if erratum not applicable */
	mrs	x1, CORTEX_A76_CPUECTLR_EL1
	orr	x1, x1, CORTEX_A76_CPUECTLR_EL1_BIT_51
	msr	CORTEX_A76_CPUECTLR_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1262888_wa
334
/*
 * Report applicability of erratum 1262888 (revisions <= r3p0).
 * In:  x0 = variant/revision of current cpu. Out: x0 = errata status.
 */
func check_errata_1262888
	mov	x1, #0x30		/* r3p0 upper bound */
	b	cpu_rev_var_ls
endfunc check_errata_1262888
339
	/* ---------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1286807.
	 * This applies only to revision <= r3p0 of Cortex A76.
	 * Due to the nature of the errata it is applied unconditionally
	 * when built in, report it as applicable in this case.
	 * (No _wa function exists here; the mitigation lives outside
	 * this file when ERRATA_A76_1286807 is enabled.)
	 * ---------------------------------------------------
	 */
func check_errata_1286807
#if ERRATA_A76_1286807
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30		/* r3p0 upper bound */
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1286807
Soby Mathew1d3ba1c2019-05-01 09:43:18 +0100356
	/* --------------------------------------------------
	 * Errata workaround for Cortex A76 Errata #1791580.
	 * This applies to revisions <= r4p0 of Cortex A76.
	 * Sets bit 2 of CPUACTLR2_EL1.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */
func errata_a76_1791580_wa
	/* Compare x0 against revision r4p0 */
	mov	x17, x30		/* preserve LR across the bl below */
	bl	check_errata_1791580
	cbz	x0, 1f			/* skip if erratum not applicable */
	mrs	x1, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x1, CORTEX_A76_CPUACTLR2_EL1_BIT_2
	msr	CORTEX_A76_CPUACTLR2_EL1, x1
	isb
1:
	ret	x17
endfunc errata_a76_1791580_wa
377
/*
 * Report applicability of erratum 1791580 (revisions <= r4p0).
 * In:  x0 = variant/revision of current cpu. Out: x0 = errata status.
 */
func check_errata_1791580
	/* Applies to everything <= r4p0. */
	mov	x1, #0x40		/* r4p0 upper bound */
	b	cpu_rev_var_ls
endfunc check_errata_1791580
383
	/* --------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1262606,
	 * #1275112, and #1868343. #1262606 and #1275112
	 * apply to revisions <= r3p0 and #1868343 applies to
	 * revisions <= r4p0. All three share the same fix:
	 * setting bit 13 of CPUACTLR_EL1, so the widest
	 * applicable revision check wins.
	 * Inputs:
	 * x0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: x0-x17
	 * --------------------------------------------------
	 */

func errata_a76_1262606_1275112_1868343_wa
	mov	x17, x30		/* preserve LR across the bl calls */

/* Check for <= r3p0 cases and branch if check passes. */
#if ERRATA_A76_1262606 || ERRATA_A76_1275112
	bl	check_errata_1262606
	cbnz	x0, 1f			/* applies: go set the bit */
#endif

/* Check for <= r4p0 cases and branch if check fails. */
#if ERRATA_A76_1868343
	bl	check_errata_1868343
	cbz	x0, 2f			/* not applicable: skip */
#endif
1:
	mrs	x1, CORTEX_A76_CPUACTLR_EL1
	orr	x1, x1, #CORTEX_A76_CPUACTLR_EL1_BIT_13
	msr	CORTEX_A76_CPUACTLR_EL1, x1
	isb
2:
	ret	x17
endfunc errata_a76_1262606_1275112_1868343_wa
417
/*
 * Report applicability of erratum 1262606 (revisions <= r3p0).
 * In:  x0 = variant/revision of current cpu. Out: x0 = errata status.
 */
func check_errata_1262606
	mov	x1, #0x30		/* r3p0 upper bound */
	b	cpu_rev_var_ls
endfunc check_errata_1262606
422
/*
 * Report applicability of erratum 1275112 (revisions <= r3p0).
 * In:  x0 = variant/revision of current cpu. Out: x0 = errata status.
 */
func check_errata_1275112
	mov	x1, #0x30		/* r3p0 upper bound */
	b	cpu_rev_var_ls
endfunc check_errata_1275112
427
/*
 * Report applicability of erratum 1868343 (revisions <= r4p0).
 * In:  x0 = variant/revision of current cpu. Out: x0 = errata status.
 */
func check_errata_1868343
	mov	x1, #0x40		/* r4p0 upper bound */
	b	cpu_rev_var_ls
endfunc check_errata_1868343
432
/* --------------------------------------------------
 * Errata Workaround for A76 Erratum 1946160.
 * This applies to revisions r3p0 - r4p1 of A76.
 * It also exists in r0p0 - r2p0 but there is no fix
 * in those revisions.
 * Inputs:
 * x0: variant[4:7] and revision[0:3] of current cpu.
 * Shall clobber: x0-x17
 * --------------------------------------------------
 */
func errata_a76_1946160_wa
	/* Compare x0 against revisions r3p0 - r4p1 */
	mov	x17, x30		/* preserve LR across the bl below */
	bl	check_errata_1946160
	cbz	x0, 1f			/* skip if erratum not applicable */

	/*
	 * Program three implementation-defined register slots
	 * (S3_6_C15_C8_0 selects the slot, _1/_2/_3 carry the data).
	 * NOTE(review): the constants below are the Arm-provided fix
	 * sequence for erratum 1946160 — confirm against the Cortex-A76
	 * Software Developer Errata Notice before modifying.
	 */
	mov	x0, #3
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3900002
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #4
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800082
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF00083
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	mov	x0, #5
	msr	S3_6_C15_C8_0, x0
	ldr	x0, =0x10E3800200
	msr	S3_6_C15_C8_2, x0
	ldr	x0, =0x10FFF003E0
	msr	S3_6_C15_C8_3, x0
	ldr	x0, =0x2001003FF
	msr	S3_6_C15_C8_1, x0

	isb
1:
	ret	x17
endfunc errata_a76_1946160_wa
480
/*
 * Report applicability of erratum 1946160 (revisions r3p0 - r4p1).
 * In:  x0 = variant/revision of current cpu. Out: x0 = errata status.
 */
func check_errata_1946160
	/* Applies to revisions r3p0 - r4p1. */
	mov	x1, #0x30		/* lower bound r3p0 */
	mov	x2, #0x41		/* upper bound r4p1 */
	b	cpu_rev_var_range
endfunc check_errata_1946160
487
/*
 * Report whether the CVE-2018-3639 mitigation is compiled in.
 * Out: x0 = ERRATA_APPLIES when WORKAROUND_CVE_2018_3639 is built,
 *      ERRATA_MISSING otherwise.
 */
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639
496
/*
 * Disable the CVE-2018-3639 mitigation by clearing the
 * DISABLE_LOAD_PASS_STORE bit in CPUACTLR2_EL1. Installed into the CPU
 * context by the fast-path macro above and invoked on el3_exit.
 * Clobbers: x0.
 */
func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639
504
	/* --------------------------------------------------------------
	 * Errata Workaround for Cortex A76 Errata #1165522.
	 * This applies only to revisions <= r3p0 of Cortex A76.
	 * Due to the nature of the errata it is applied unconditionally
	 * when built in, report it as applicable in this case.
	 * (No _wa function exists in this file for this erratum.)
	 * --------------------------------------------------------------
	 */
func check_errata_1165522
#if ERRATA_A76_1165522
	mov	x0, #ERRATA_APPLIES
	ret
#else
	mov	x1, #0x30		/* r3p0 upper bound */
	b	cpu_rev_var_ls
#endif
endfunc check_errata_1165522
521
	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A76.
	 * Applies every build-time-enabled erratum workaround
	 * for the detected revision, then sets up the
	 * CVE-2018-3639 mitigation (static or dynamic).
	 * Shall clobber: x0-x19
	 * -------------------------------------------------
	 */
func cortex_a76_reset_func
	mov	x19, x30		/* preserve LR across bl calls */
	bl	cpu_get_rev_var
	mov	x18, x0			/* x18 = rev/var, reloaded per workaround */

#if ERRATA_A76_1073348
	mov	x0, x18
	bl	errata_a76_1073348_wa
#endif

#if ERRATA_A76_1130799
	mov	x0, x18
	bl	errata_a76_1130799_wa
#endif

#if ERRATA_A76_1220197
	mov	x0, x18
	bl	errata_a76_1220197_wa
#endif

#if ERRATA_A76_1257314
	mov	x0, x18
	bl	errata_a76_1257314_wa
#endif

#if ERRATA_A76_1262606 || ERRATA_A76_1275112 || ERRATA_A76_1868343
	mov	x0, x18
	bl	errata_a76_1262606_1275112_1868343_wa
#endif

#if ERRATA_A76_1262888
	mov	x0, x18
	bl	errata_a76_1262888_wa
#endif

#if ERRATA_A76_1791580
	mov	x0, x18
	bl	errata_a76_1791580_wa
#endif

#if ERRATA_A76_1946160
	mov	x0, x18
	bl	errata_a76_1946160_wa
#endif

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
	/* Static-only build: assert that SSBS really is implemented */
	cmp	x0, 0
	ASM_ASSERT(ne)
#endif
#if DYNAMIC_WORKAROUND_CVE_2018_3639
	cbnz	x0, 1f			/* SSBS present: skip dynamic setup */
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif /* IMAGE_BL31 */

1:
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
#endif /* WORKAROUND_CVE_2018_3639 */

#if ERRATA_DSU_798953
	bl	errata_dsu_798953_wa
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif

	ret	x19
endfunc cortex_a76_reset_func
613
	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb				/* ensure the write is visible before WFI */
	ret
endfunc cortex_a76_core_pwr_dwn
629
#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!	/* save x8 (rev/var holder) and LR */

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata ERRATA_A76_1257314, cortex_a76, 1257314
	report_errata ERRATA_A76_1262606, cortex_a76, 1262606
	report_errata ERRATA_A76_1262888, cortex_a76, 1262888
	report_errata ERRATA_A76_1275112, cortex_a76, 1275112
	report_errata ERRATA_A76_1286807, cortex_a76, 1286807
	report_errata ERRATA_A76_1791580, cortex_a76, 1791580
	report_errata ERRATA_A76_1165522, cortex_a76, 1165522
	report_errata ERRATA_A76_1868343, cortex_a76, 1868343
	report_errata ERRATA_A76_1946160, cortex_a76, 1946160
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif
664
	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""	/* empty string terminates the list */

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump
683
/*
 * Register the Cortex-A76 cpu_ops (with workaround entry points):
 * reset, CVE-2018-3639 disable hook, and core power-down handler.
 */
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn