/*
 * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <arm_arch_svc.h>
#include <asm_macros.S>
#include <bl_common.h>
#include <context.h>
#include <cortex_a76.h>
#include <cpu_macros.S>
#include <plat_macros.S>

#if !DYNAMIC_WORKAROUND_CVE_2018_3639
#error Cortex A76 requires DYNAMIC_WORKAROUND_CVE_2018_3639=1
#endif

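/*
 * ESR_EL3 values for an SMC #0 trapped from AArch64 and AArch32
 * respectively (EC = 0x17 and 0x13, IL bit set, ISS = 0).
 */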
#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000

	/*
	 * This macro applies the mitigation for CVE-2018-3639.
	 * It implements a fast path where `SMCCC_ARCH_WORKAROUND_2`
	 * SMC calls from a lower EL running in AArch32 or AArch64
	 * will take the fast path and return early.
	 *
	 * The macro saves x2-x3 to the context. In the fast path the
	 * x0-x3 registers do not need to be restored, as the calling
	 * context will have saved them.
	 */
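	/*
	 * Illustrative sketch only (C-like pseudocode, not part of the
	 * build) of what the fast path below does on a synchronous entry:
	 *
	 *	if (esr_el3 == SMC#0 && w0 == SMCCC_ARCH_WORKAROUND_2) {
	 *		if (x1 == 0)
	 *			clear DISABLE_LOAD_PASS_STORE and program the
	 *			disable hook into the CPU context;
	 *		else
	 *			set DISABLE_LOAD_PASS_STORE and program NULL
	 *			into the CPU context;
	 *		eret;
	 *	}
	 */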
	.macro apply_cve_2018_3639_wa _is_sync_exception _esr_el3_val
	stp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]

	.if \_is_sync_exception
	/*
	 * Ensure the SMC was issued from AArch64/AArch32 state with
	 * immediate #0 and with W0 = SMCCC_ARCH_WORKAROUND_2.
	 *
	 * This sequence evaluates as:
	 *	(W0==SMCCC_ARCH_WORKAROUND_2) ? (ESR_EL3==SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 */
	orr	w2, wzr, #SMCCC_ARCH_WORKAROUND_2
	cmp	x0, x2
	mrs	x3, esr_el3
	mov_imm	w2, \_esr_el3_val
	ccmp	w2, w3, #0, eq
	/*
	 * Static predictor will predict a fall-through, optimizing
	 * the `SMCCC_ARCH_WORKAROUND_2` fast path.
	 */
	bne	1f

	/*
	 * The sequence below implements the `SMCCC_ARCH_WORKAROUND_2`
	 * fast path.
	 */
	cmp	x1, xzr /* enable/disable check */

	/*
	 * When the calling context wants the mitigation disabled,
	 * we program the mitigation disable function in the
	 * CPU context, which gets invoked on subsequent exits from
	 * EL3 via the `el3_exit` function. Otherwise NULL is
	 * programmed in the CPU context, which results in the caller
	 * inheriting the EL3 mitigation state (enabled) on subsequent
	 * `el3_exit`.
	 */
	mov	x0, xzr
	adr	x1, cortex_a76_disable_wa_cve_2018_3639
	csel	x1, x1, x0, eq
	str	x1, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]

	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x1, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	bic	x3, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	csel	x3, x3, x1, eq
	msr	CORTEX_A76_CPUACTLR2_EL1, x3
	eret	/* ERET implies ISB */
	.endif
1:
	/*
	 * Always enable the variant 4 (CVE-2018-3639) mitigation during EL3
	 * execution. This is not required for the fast path above because it
	 * does not perform any memory loads.
	 */
	mrs	x2, CORTEX_A76_CPUACTLR2_EL1
	orr	x2, x2, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x2
	isb

	/*
	 * The caller may have passed arguments to EL3 via x2-x3.
	 * Restore these registers from the context before jumping to the
	 * main runtime vector table entry.
	 */
	ldp	x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
	.endm

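/*
 * Exception vectors that apply the CVE-2018-3639 mitigation on entry from
 * lower ELs before branching to the generic runtime exception handlers.
 */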
vector_base cortex_a76_wa_cve_2018_3639_a76_vbar

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry cortex_a76_sync_exception_sp_el0

vector_entry cortex_a76_irq_sp_el0
	b	irq_sp_el0
end_vector_entry cortex_a76_irq_sp_el0

vector_entry cortex_a76_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry cortex_a76_fiq_sp_el0

vector_entry cortex_a76_serror_sp_el0
	b	serror_sp_el0
end_vector_entry cortex_a76_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry cortex_a76_sync_exception_sp_elx

vector_entry cortex_a76_irq_sp_elx
	b	irq_sp_elx
end_vector_entry cortex_a76_irq_sp_elx

vector_entry cortex_a76_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry cortex_a76_fiq_sp_elx

vector_entry cortex_a76_serror_sp_elx
	b	serror_sp_elx
end_vector_entry cortex_a76_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry cortex_a76_sync_exception_aarch64

vector_entry cortex_a76_irq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry cortex_a76_irq_aarch64

vector_entry cortex_a76_fiq_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry cortex_a76_fiq_aarch64

vector_entry cortex_a76_serror_aarch64
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry cortex_a76_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry cortex_a76_sync_exception_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry cortex_a76_sync_exception_aarch32

vector_entry cortex_a76_irq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry cortex_a76_irq_aarch32

vector_entry cortex_a76_fiq_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry cortex_a76_fiq_aarch32

vector_entry cortex_a76_serror_aarch32
	apply_cve_2018_3639_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry cortex_a76_serror_aarch32

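/*
 * Return ERRATA_APPLIES if the CVE-2018-3639 workaround is compiled in,
 * ERRATA_MISSING otherwise.
 */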
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639

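/*
 * Turn the CVE-2018-3639 mitigation off by clearing the "disable load
 * pass store" bit in CPUACTLR2_EL1. This is the function programmed into
 * the CPU context by the fast path above and invoked via `el3_exit`.
 */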
func cortex_a76_disable_wa_cve_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	bic	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
	ret
endfunc cortex_a76_disable_wa_cve_2018_3639

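/*
 * Reset handler: statically enable the CVE-2018-3639 mitigation, install
 * the mitigation vector table for BL31 and apply the DSU erratum 936184
 * workaround, where these options are compiled in.
 */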
func cortex_a76_reset_func
	mov	x19, x30
#if WORKAROUND_CVE_2018_3639
	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
	msr	CORTEX_A76_CPUACTLR2_EL1, x0
	isb
#endif

#if IMAGE_BL31 && WORKAROUND_CVE_2018_3639
	/*
	 * The Cortex-A76 generic vectors are overwritten to use the vectors
	 * defined above. This is required in order to apply mitigation
	 * against CVE-2018-3639 on exception entry from lower ELs.
	 */
	adr	x0, cortex_a76_wa_cve_2018_3639_a76_vbar
	msr	vbar_el3, x0
	isb
#endif

#if ERRATA_DSU_936184
	bl	errata_dsu_936184_wa
#endif
	ret	x19
endfunc cortex_a76_reset_func

	/* ---------------------------------------------
	 * HW will do the cache maintenance while powering down
	 * ---------------------------------------------
	 */
func cortex_a76_core_pwr_dwn
	/* ---------------------------------------------
	 * Enable CPU power down bit in power control register
	 * ---------------------------------------------
	 */
	mrs	x0, CORTEX_A76_CPUPWRCTLR_EL1
	orr	x0, x0, #CORTEX_A76_CORE_PWRDN_EN_MASK
	msr	CORTEX_A76_CPUPWRCTLR_EL1, x0
	isb
	ret
endfunc cortex_a76_core_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex-A76. Must follow AAPCS.
 */
func cortex_a76_errata_report
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking function of each erratum.
	 */
	report_errata WORKAROUND_CVE_2018_3639, cortex_a76, cve_2018_3639
	report_errata ERRATA_DSU_936184, cortex_a76, dsu_936184

	ldp	x8, x30, [sp], #16
	ret
endfunc cortex_a76_errata_report
#endif

	/* ---------------------------------------------
	 * This function provides cortex_a76 specific
	 * register information for crash reporting.
	 * It needs to return with x6 pointing to
	 * a list of register names in ascii and
	 * x8 - x15 having values of registers to be
	 * reported.
	 * ---------------------------------------------
	 */
.section .rodata.cortex_a76_regs, "aS"
cortex_a76_regs:  /* The ascii list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func cortex_a76_cpu_reg_dump
	adr	x6, cortex_a76_regs
	mrs	x8, CORTEX_A76_CPUECTLR_EL1
	ret
endfunc cortex_a76_cpu_reg_dump

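/*
 * Register the Cortex-A76 CPU operations with the reset handler, the
 * CVE-2018-3639 disable function and the core power down handler.
 */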
declare_cpu_ops_wa cortex_a76, CORTEX_A76_MIDR, \
	cortex_a76_reset_func, \
	CPU_NO_EXTRA1_FUNC, \
	cortex_a76_disable_wa_cve_2018_3639, \
	cortex_a76_core_pwr_dwn