/*
 * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>
#include <services/arm_arch_svc.h>

/* ESR_EL3 values for an SMC #0 trapped from AArch64 / AArch32 state */
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

#if DYNAMIC_WORKAROUND_CVE_2018_3639
	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will go through the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them.
	 */
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
	/*
	 * Ensure SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation
	 */
	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x2
	mrs	x3, esr_el3
	mov_imm	w2, \_esr_el3_val
	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in caller's
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	eret	/* ERET implies ISB */
	.endif
1:
	/*
	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm

vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32
#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

Louis Mayencourt09924472019-02-21 17:35:07 +0000190 /* --------------------------------------------------
Louis Mayencourt59fa2182019-02-25 15:17:44 +0000191 * Errata Workaround for Cortex A76 Errata #1073348.
192 * This applies only to revision <= r1p0 of Cortex A76.
193 * Inputs:
194 * x0: variant[4:7] and revision[0:3] of current cpu.
195 * Shall clobber: x0-x17
196 * --------------------------------------------------
197 */
198func errata_a76_1073348_wa
199 /*
200 * Compare x0 against revision r1p0
201 */
202 mov x17, x30
203 bl check_errata_1073348
204 cbz x0, 1f
205 mrs x1, CORTEX_A76_CPUACTLR_EL1
206 orr x1, x1 ,#CORTEX_A76_CPUACTLR_EL1_DISABLE_STATIC_PREDICTION
207 msr CORTEX_A76_CPUACTLR_EL1, x1
208 isb
2091:
210 ret x17
211 endfunc errata_a76_1073348_wa
212
func check_errata_1073348
	/* Erratum applies to all revisions <= r1p0 */
	mov	x1, #0x10
	b	cpu_rev_var_ls
endfunc check_errata_1073348

218 /* --------------------------------------------------
Louis Mayencourt09924472019-02-21 17:35:07 +0000219 * Errata Workaround for Cortex A76 Errata #1130799.
220 * This applies only to revision <= r2p0 of Cortex A76.
221 * Inputs:
222 * x0: variant[4:7] and revision[0:3] of current cpu.
223 * Shall clobber: x0-x17
224 * --------------------------------------------------
225 */
226func errata_a76_1130799_wa
227 /*
228 * Compare x0 against revision r2p0
229 */
230 mov x17, x30
231 bl check_errata_1130799
232 cbz x0, 1f
233 mrs x1, CORTEX_A76_CPUACTLR2_EL1
234 orr x1, x1 ,#(1 << 59)
235 msr CORTEX_A76_CPUACTLR2_EL1, x1
236 isb
2371:
238 ret x17
239endfunc errata_a76_1130799_wa
240
func check_errata_1130799
	/* Erratum applies to all revisions <= r2p0 */
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1130799

Louis Mayencourtadda9d42019-02-25 11:37:38 +0000246 /* --------------------------------------------------
247 * Errata Workaround for Cortex A76 Errata #1220197.
248 * This applies only to revision <= r2p0 of Cortex A76.
249 * Inputs:
250 * x0: variant[4:7] and revision[0:3] of current cpu.
251 * Shall clobber: x0-x17
252 * --------------------------------------------------
253 */
254func errata_a76_1220197_wa
255/*
256 * Compare x0 against revision r2p0
257 */
258 mov x17, x30
259 bl check_errata_1220197
260 cbz x0, 1f
261 mrs x1, CORTEX_A76_CPUECTLR_EL1
262 orr x1, x1, #CORTEX_A76_CPUECTLR_EL1_WS_THR_L2
263 msr CORTEX_A76_CPUECTLR_EL1, x1
264 isb
2651:
266 ret x17
267endfunc errata_a76_1220197_wa
268
func check_errata_1220197
	/* Erratum applies to all revisions <= r2p0 */
	mov	x1, #0x20
	b	cpu_rev_var_ls
endfunc check_errata_1220197

func check_errata_cve_2018_3639
	/* The mitigation is compile-time selected; report accordingly */
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

283func cortex_a76_disable_wa_cve_2018_3639
284 mrs x0, CORTEX_A76_CPUACTLR2_EL1
285 bic x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
286 msr CORTEX_A76_CPUACTLR2_EL1, x0
287 isb
288 ret
289endfunc cortex_a76_disable_wa_cve_2018_3639
290
Louis Mayencourt09924472019-02-21 17:35:07 +0000291 /* -------------------------------------------------
292 * The CPU Ops reset function for Cortex-A76.
293 * Shall clobber: x0-x19
294 * -------------------------------------------------
295 */
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100296func cortex_a76_reset_func
John Tsichritzis4daa1de2018-07-23 09:11:59 +0100297 mov x19, x30
Louis Mayencourt09924472019-02-21 17:35:07 +0000298 bl cpu_get_rev_var
Louis Mayencourtadda9d42019-02-25 11:37:38 +0000299 mov x18, x0
Louis Mayencourt09924472019-02-21 17:35:07 +0000300
Louis Mayencourt59fa2182019-02-25 15:17:44 +0000301#if ERRATA_A76_1073348
302 mov x0, x18
303 bl errata_a76_1073348_wa
304#endif
305
Louis Mayencourt09924472019-02-21 17:35:07 +0000306#if ERRATA_A76_1130799
Louis Mayencourtadda9d42019-02-25 11:37:38 +0000307 mov x0, x18
Louis Mayencourt09924472019-02-21 17:35:07 +0000308 bl errata_a76_1130799_wa
309#endif
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +0000310
Louis Mayencourtadda9d42019-02-25 11:37:38 +0000311#if ERRATA_A76_1220197
312 mov x0, x18
313 bl errata_a76_1220197_wa
314#endif
315
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100316#if WORKAROUND_CVE_2018_3639
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +0000317 /* If the PE implements SSBS, we don't need the dynamic workaround */
318 mrs x0, id_aa64pfr1_el1
319 lsr x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
Ambroise Vincent6dbbe432019-03-07 14:31:33 +0000320 and x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
Ambroise Vincent8c6fdf82019-03-07 14:33:02 +0000321#if !DYNAMIC_WORKAROUND_CVE_2018_3639 && ENABLE_ASSERTIONS
322 cmp x0, 0
323 ASM_ASSERT(ne)
324#endif
325#if DYNAMIC_WORKAROUND_CVE_2018_3639
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +0000326 cbnz x0, 1f
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100327 mrs x0, CORTEX_A76_CPUACTLR2_EL1
328 orr x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
329 msr CORTEX_A76_CPUACTLR2_EL1, x0
330 isb
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100331
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +0000332#ifdef IMAGE_BL31
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100333 /*
334 * The Cortex-A76 generic vectors are overwritten to use the vectors
Ambroise Vincent6dbbe432019-03-07 14:31:33 +0000335 * defined above. This is required in order to apply mitigation
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100336 * against CVE-2018-3639 on exception entry from lower ELs.
337 */
338 adr x0, cortex_a76_wa_cve_2018_3639_a76_vbar
339 msr vbar_el3, x0
340 isb
Ambroise Vincent8c6fdf82019-03-07 14:33:02 +0000341#endif /* IMAGE_BL31 */
John Tsichritzis4daa1de2018-07-23 09:11:59 +0100342
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +00003431:
Ambroise Vincent8c6fdf82019-03-07 14:33:02 +0000344#endif /* DYNAMIC_WORKAROUND_CVE_2018_3639 */
345#endif /* WORKAROUND_CVE_2018_3639 */
Jeenu Viswambharanaa00aff2018-11-15 11:38:03 +0000346
Louis Mayencourt4498b152019-04-09 16:29:01 +0100347#if ERRATA_DSU_798953
348 bl errata_dsu_798953_wa
349#endif
350
John Tsichritzis4daa1de2018-07-23 09:11:59 +0100351#if ERRATA_DSU_936184
352 bl errata_dsu_936184_wa
353#endif
Louis Mayencourt4498b152019-04-09 16:29:01 +0100354
John Tsichritzis4daa1de2018-07-23 09:11:59 +0100355 ret x19
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100356endfunc cortex_a76_reset_func
357
Isla Mitchellea84d6b2017-08-03 16:04:46 +0100358 /* ---------------------------------------------
359 * HW will do the cache maintenance while powering down
360 * ---------------------------------------------
361 */
362func cortex_a76_core_pwr_dwn
363 /* ---------------------------------------------
364 * Enable CPU power down bit in power control register
365 * ---------------------------------------------
366 */
367 mrs x0, CORTEX_A76_CPUPWRCTLR_EL1
368 orr x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
369 msr CORTEX_A76_CPUPWRCTLR_EL1, x0
370 isb
371 ret
372endfunc cortex_a76_core_pwr_dwn
373
#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking functions of each errata.
	 */
	report_errata ERRATA_A76_1073348, cortex_a76, 1073348
	report_errata ERRATA_A76_1130799, cortex_a76, 1130799
	report_errata ERRATA_A76_1220197, cortex_a76, 1220197
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_798953, cortex_a76, dsu_798953
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

Isla Mitchellea84d6b2017-08-03 16:04:46 +0100400 /* ---------------------------------------------
401 * This function provides cortex_a76 specific
402 * register information for crash reporting.
403 * It needs to return with x6 pointing to
404 * a list of register names in ascii and
405 * x8 - x15 having values of registers to be
406 * reported.
407 * ---------------------------------------------
408 */
409.section .rodata.cortex_a76_regs, "aS"
410cortex_a76_regs: /* The ascii list of register names to be reported */
411 .asciz "cpuectlr_el1", ""
412
413func cortex_a76_cpu_reg_dump
414 adr x6, cortex_a76_regs
415 mrs x8, CORTEX_A76_CPUECTLR_EL1
416 ret
417endfunc cortex_a76_cpu_reg_dump
418
Dimitris Papastamos312e17e2018-05-16 09:59:54 +0100419declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
420 cortex_a76_reset_func, \
421 CPU_NO_EXTRA1_FUNC, \
422 cortex_a76_disable_wa_cve_2018_3639, \
Isla Mitchellea84d6b2017-08-03 16:04:46 +0100423 cortex_a76_core_pwr_dwn