/*
 * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <arm_arch_svc.h>
#include <asm_macros.S>
#include <bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>

#if !DYNAMIC_WORKAROUND_CVE_2018_3639
#error Cortex A76 requires DYNAMIC_WORKAROUND_CVE_2018_3639=1
#endif
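
/*
 * The CVE-2018-3639 mitigation on this core uses the dynamic workaround
 * infrastructure (the CPU-context storage and the `SMCCC_ARCH_WORKAROUND_2`
 * fast path below), hence the hard build-time requirement above.
 */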

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
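
/*
 * These ESR_EL3 values match an SMC #0 trapped from a lower EL: EC (bits
 * [31:26]) is 0x17 for an SMC from AArch64 or 0x13 for an SMC from AArch32,
 * the IL bit [25] is set, and the ISS (the SMC immediate) is zero.
 */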

	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will take the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path
	 * x0-x3 registers do not need to be restored as the calling
	 * context will have saved them.
	 */
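	/*
	 * Expected calling convention (per SMCCC v1.1): w0 carries the
	 * SMCCC_ARCH_WORKAROUND_2 function id and w1 selects the requested
	 * state: non-zero enables the mitigation, zero disables it.
	 */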
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
	/*
	 * Ensure SMC is coming from A64/A32 state on #0
	 * with W0 = SMCCC_ARCH_WORKAROUND_2
	 *
	 * This sequence evaluates as:
	 *    (W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation
	 */
	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x2			/* EQ iff x0 is the workaround id */
	mrs	x3, esr_el3
	mov_imm	w2, \_esr_el3_val
	/* If EQ, compare ESR_EL3 against SMC#0; otherwise force NE */
	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq	/* disable function if w1 == 0, else NULL */
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq	/* mitigation off if w1 == 0, else on */
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	eret	/* ERET implies ISB */
	.endif
1:
	/*
	 * Always enable v4 mitigation during EL3 execution. This is not
	 * required for the fast path above because it does not perform any
	 * memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm

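/*
 * Private vector table. It mirrors the generic BL31 runtime vectors;
 * entries taken from a lower EL first run the macro above to apply (or
 * fast-path) the CVE-2018-3639 mitigation before branching to the common
 * handlers.
 */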
vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32

func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

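/*
 * Disable the CVE-2018-3639 mitigation. The fast path above stores this
 * function's address in the CPU context so that `el3_exit` invokes it when
 * the caller has asked for the mitigation to be disabled.
 */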
func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

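/*
 * Reset handler: enable the CVE-2018-3639 mitigation unless the PE
 * implements SSBS, install the private vector table when built into BL31,
 * and apply the DSU erratum 936184 workaround when enabled.
 */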
func cortex_a76_reset_func
	mov	x19, x30

#if WORKAROUND_CVE_2018_3639
	/* If the PE implements SSBS, we don't need the dynamic workaround */
	mrs	x0, id_aa64pfr1_el1
	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
	cbnz	x0, 1f

	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb

#ifdef IMAGE_BL31
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif

1:
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif
	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex-A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking function of each erratum.
	 */
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""	/* the empty string terminates the list */

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

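/*
 * Register the cpu_ops for this core. The `_wa` variant additionally takes
 * the CVE workaround hooks: extra1 (the CVE-2017-5715 check, unused here,
 * hence CPU_NO_EXTRA1_FUNC) and extra2 (the CVE-2018-3639 disable function
 * invoked via `SMCCC_ARCH_WORKAROUND_2`).
 */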
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn