blob: d662e7f896a26e590ec630be66d1b05aee78d6dd [file] [log] [blame]
Varun Wadekar28463b92015-07-14 17:11:20 +05301/*
Varun Wadekare34bd092018-01-10 17:03:22 -08002 * Copyright (c) 2015-2018, ARM Limited and Contributors. All rights reserved.
Varun Wadekar88d0f062020-05-24 16:26:22 -07003 * Copyright (c) 2020, NVIDIA Corporation. All rights reserved.
Varun Wadekar28463b92015-07-14 17:11:20 +05304 *
dp-armfa3cf0b2017-05-03 09:38:09 +01005 * SPDX-License-Identifier: BSD-3-Clause
Varun Wadekar28463b92015-07-14 17:11:20 +05306 */
7
8#include <arch.h>
9#include <asm_macros.S>
10#include <assert_macros.S>
Varun Wadekare34bd092018-01-10 17:03:22 -080011#include <context.h>
Varun Wadekar28463b92015-07-14 17:11:20 +053012#include <denver.h>
13#include <cpu_macros.S>
14#include <plat_macros.S>
15
	/* -------------------------------------------------
	 * CVE-2017-5715 mitigation
	 *
	 * Flush the indirect branch predictor and RSB on
	 * entry to EL3 by issuing a newly added instruction
	 * for Denver CPUs.
	 *
	 * To achieve this without performing any branch
	 * instruction, a per-cpu vbar is installed which
	 * executes the workaround and then branches off to
	 * the corresponding vector entry in the main vector
	 * table.
	 * -------------------------------------------------
	 */
vector_base workaround_bpflush_runtime_exceptions

	/*
	 * Issue the branch predictor + RSB flush while
	 * preserving x0/x1 in the per-cpu context area on
	 * the EL3 stack. Used by the lower-EL vector
	 * entries below.
	 */
	.macro apply_workaround
	/* Stash x0/x1 in the GP-regs area of the EL3 context frame */
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]

	/* Disable cycle counter when event counting is prohibited */
	mrs	x1, pmcr_el0
	orr	x0, x1, #PMCR_EL0_DP_BIT
	msr	pmcr_el0, x0
	isb

	/* -------------------------------------------------
	 * A new write-only system register where a write of
	 * 1 to bit 0 will cause the indirect branch predictor
	 * and RSB to be flushed.
	 *
	 * A write of 0 to bit 0 will be ignored. A write of
	 * 1 to any other bit will cause an MCA.
	 * -------------------------------------------------
	 */
	mov	x0, #1
	msr	s3_0_c15_c0_6, x0
	isb

	/* Restore the scratch registers clobbered above */
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	.endm
56
	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 *
	 * NOTE: the bpflush workaround is only applied on the lower-EL
	 * entries further below; current-EL entries branch straight to the
	 * main vector table.
	 * ---------------------------------------------------------------------
	 */
vector_entry workaround_bpflush_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry workaround_bpflush_sync_exception_sp_el0

vector_entry workaround_bpflush_irq_sp_el0
	b	irq_sp_el0
end_vector_entry workaround_bpflush_irq_sp_el0

vector_entry workaround_bpflush_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry workaround_bpflush_fiq_sp_el0

vector_entry workaround_bpflush_serror_sp_el0
	b	serror_sp_el0
end_vector_entry workaround_bpflush_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry workaround_bpflush_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry workaround_bpflush_sync_exception_sp_elx

vector_entry workaround_bpflush_irq_sp_elx
	b	irq_sp_elx
end_vector_entry workaround_bpflush_irq_sp_elx

vector_entry workaround_bpflush_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry workaround_bpflush_fiq_sp_elx

vector_entry workaround_bpflush_serror_sp_elx
	b	serror_sp_elx
end_vector_entry workaround_bpflush_serror_sp_elx

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 *
	 * Flush the branch predictor/RSB before delegating to the main
	 * vector table entry.
	 * ---------------------------------------------------------------------
	 */
vector_entry workaround_bpflush_sync_exception_aarch64
	apply_workaround
	b	sync_exception_aarch64
end_vector_entry workaround_bpflush_sync_exception_aarch64

vector_entry workaround_bpflush_irq_aarch64
	apply_workaround
	b	irq_aarch64
end_vector_entry workaround_bpflush_irq_aarch64

vector_entry workaround_bpflush_fiq_aarch64
	apply_workaround
	b	fiq_aarch64
end_vector_entry workaround_bpflush_fiq_aarch64

vector_entry workaround_bpflush_serror_aarch64
	apply_workaround
	b	serror_aarch64
end_vector_entry workaround_bpflush_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 *
	 * Same treatment as the AArch64 lower-EL entries above.
	 * ---------------------------------------------------------------------
	 */
vector_entry workaround_bpflush_sync_exception_aarch32
	apply_workaround
	b	sync_exception_aarch32
end_vector_entry workaround_bpflush_sync_exception_aarch32

vector_entry workaround_bpflush_irq_aarch32
	apply_workaround
	b	irq_aarch32
end_vector_entry workaround_bpflush_irq_aarch32

vector_entry workaround_bpflush_fiq_aarch32
	apply_workaround
	b	fiq_aarch32
end_vector_entry workaround_bpflush_fiq_aarch32

vector_entry workaround_bpflush_serror_aarch32
	apply_workaround
	b	serror_aarch32
end_vector_entry workaround_bpflush_serror_aarch32
Varun Wadekare34bd092018-01-10 17:03:22 -0800144
	.global denver_disable_dco

	/* ---------------------------------------------
	 * Disable debug interfaces
	 *
	 * Sets the OS Double Lock (OSDLR_EL1) to force
	 * the external debug interfaces quiescent before
	 * power down.
	 * Clobbers: x0
	 * ---------------------------------------------
	 */
func denver_disable_ext_debug
	mov	x0, #1
	msr	osdlr_el1, x0
	isb
	dsb	sy
	ret
endfunc denver_disable_ext_debug
158
	/* ----------------------------------------------------
	 * Enable dynamic code optimizer (DCO)
	 *
	 * Sets this core's bit in the implementation-defined
	 * DCO control register (s3_0_c15_c0_2). The link
	 * register is preserved in x18 across the call to
	 * plat_my_core_pos (x18 is not otherwise live here;
	 * NOTE(review): it is the platform register in
	 * AAPCS64 — confirm no caller relies on it).
	 * Clobbers: x0, x1, x18
	 * ----------------------------------------------------
	 */
func denver_enable_dco
	mov	x18, x30
	bl	plat_my_core_pos	/* x0 = this core's position */
	mov	x1, #1
	lsl	x1, x1, x0		/* x1 = enable bit for this core */
	msr	s3_0_c15_c0_2, x1
	mov	x30, x18
	ret
endfunc denver_enable_dco
172
	/* ----------------------------------------------------
	 * Disable dynamic code optimizer (DCO)
	 *
	 * Writes the per-core disable bit (bits [16 + core])
	 * of the implementation-defined DCO control register
	 * and then spins until the matching status bit in the
	 * upper word of the same register clears.
	 * Clobbers: x0-x2, x18 (x18 holds the return address
	 * across the plat_my_core_pos call).
	 * ----------------------------------------------------
	 */
func denver_disable_dco

	mov	x18, x30

	/* turn off background work */
	bl	plat_my_core_pos
	mov	x1, #1
	lsl	x1, x1, x0		/* x1 = bit for this core */
	lsl	x2, x1, #16		/* disable bits live at [16 + core] */
	msr	s3_0_c15_c0_2, x2
	isb

	/* wait till the background work turns off */
1:	mrs	x2, s3_0_c15_c0_2
	lsr	x2, x2, #32		/* status field is in the upper word */
	and	w2, w2, 0xFFFF
	and	x2, x2, x1		/* isolate this core's status bit */
	cbnz	x2, 1b

	mov	x30, x18
	ret
endfunc denver_disable_dco
199
	/* ----------------------------------------------------
	 * Report whether the CVE-2017-5715 workaround applies
	 * on this CPU.
	 * Returns: x0 = ERRATA_APPLIES or ERRATA_MISSING
	 * Clobbers: x1, x2
	 * ----------------------------------------------------
	 */
func check_errata_cve_2017_5715
	mov	x0, #ERRATA_MISSING
#if WORKAROUND_CVE_2017_5715
	/*
	 * Check if the CPU supports the special instruction
	 * required to flush the indirect branch predictor and
	 * RSB. Support for this operation can be determined by
	 * comparing bits 19:16 of ID_AFR0_EL1 with 0b0001.
	 */
	mrs	x1, id_afr0_el1
	mov	x2, #0x10000		/* mask for bit 16 (value 0b0001 in [19:16]) */
	and	x1, x1, x2
	cbz	x1, 1f
	mov	x0, #ERRATA_APPLIES
1:
#endif
	ret
endfunc check_errata_cve_2017_5715
218
	/* ----------------------------------------------------
	 * Report whether the CVE-2018-3639 workaround is
	 * compiled in (compile-time decision only).
	 * Returns: x0 = ERRATA_APPLIES or ERRATA_MISSING
	 * ----------------------------------------------------
	 */
func check_errata_cve_2018_3639
#if WORKAROUND_CVE_2018_3639
	mov	x0, #ERRATA_APPLIES
#else
	mov	x0, #ERRATA_MISSING
#endif
	ret
endfunc check_errata_cve_2018_3639
227
	/* -------------------------------------------------
	 * The CPU Ops reset function for Denver.
	 *
	 * Installs the CVE-2017-5715 workaround vectors
	 * (when supported), applies the CVE-2018-3639
	 * ACTLR_EL3 mitigation, resets ACTLR.PMSTATE and
	 * re-enables the DCO.
	 * -------------------------------------------------
	 */
func denver_reset_func

	mov	x19, x30		/* preserve lr across bl below */

#if IMAGE_BL31 && WORKAROUND_CVE_2017_5715
	/*
	 * Check if the CPU supports the special instruction
	 * required to flush the indirect branch predictor and
	 * RSB. Support for this operation can be determined by
	 * comparing bits 19:16 of ID_AFR0_EL1 with 0b0001.
	 */
	mrs	x0, id_afr0_el1
	mov	x1, #0x10000
	and	x0, x0, x1
	cmp	x0, #0
	adr	x1, workaround_bpflush_runtime_exceptions
	mrs	x2, vbar_el3
	/* Install workaround vectors only when the flush insn exists */
	csel	x0, x1, x2, ne
	msr	vbar_el3, x0
#endif

#if WORKAROUND_CVE_2018_3639
	/*
	 * Denver CPUs with DENVER_MIDR_PN3 or earlier, use different
	 * bits in the ACTLR_EL3 register to disable speculative
	 * store buffer and memory disambiguation.
	 */
	mrs	x0, midr_el1
	mov_imm	x1, DENVER_MIDR_PN4
	cmp	x0, x1
	mrs	x0, actlr_el3
	mov	x1, #(DENVER_CPU_DIS_MD_EL3 | DENVER_CPU_DIS_SSB_EL3)
	mov	x2, #(DENVER_PN4_CPU_DIS_MD_EL3 | DENVER_PN4_CPU_DIS_SSB_EL3)
	/*
	 * x3 = legacy bits when MIDR != PN4, PN4 bits otherwise.
	 * NOTE(review): any MIDR other than PN4 (including later parts)
	 * selects the legacy bits here — confirm intended for PN5+.
	 */
	csel	x3, x1, x2, ne
	orr	x0, x0, x3
	msr	actlr_el3, x0
	isb
	dsb	sy
#endif

	/* ----------------------------------------------------
	 * Reset ACTLR.PMSTATE to C1 state
	 * ----------------------------------------------------
	 */
	mrs	x0, actlr_el1
	bic	x0, x0, #DENVER_CPU_PMSTATE_MASK
	orr	x0, x0, #DENVER_CPU_PMSTATE_C1
	msr	actlr_el1, x0

	/* ----------------------------------------------------
	 * Enable dynamic code optimizer (DCO)
	 * ----------------------------------------------------
	 */
	bl	denver_enable_dco

	ret	x19
endfunc denver_reset_func
289
	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Denver.
	 *
	 * Only quiesces the debug interfaces; the rest of the
	 * power-down sequence is handled elsewhere.
	 * ----------------------------------------------------
	 */
func denver_core_pwr_dwn

	mov	x19, x30		/* preserve lr across bl below */

	/* ---------------------------------------------
	 * Force the debug interfaces to be quiescent
	 * ---------------------------------------------
	 */
	bl	denver_disable_ext_debug

	ret	x19
endfunc denver_core_pwr_dwn
306
	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Denver.
	 *
	 * No cluster-level work is required; intentionally empty.
	 * -------------------------------------------------------
	 */
func denver_cluster_pwr_dwn
	ret
endfunc denver_cluster_pwr_dwn
314
#if REPORT_ERRATA
	/*
	 * Errata printing function for Denver. Must follow AAPCS.
	 */
func denver_errata_report
	/* Preserve x8 and the link register across the report calls */
	stp	x8, x30, [sp, #-16]!

	bl	cpu_get_rev_var
	mov	x8, x0			/* x8 = revision-variant info */

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata WORKAROUND_CVE_2017_5715, denver, cve_2017_5715
	report_errata WORKAROUND_CVE_2018_3639, denver, cve_2018_3639

	ldp	x8, x30, [sp], #16
	ret
endfunc denver_errata_report
#endif
336
Varun Wadekar28463b92015-07-14 17:11:20 +0530337 /* ---------------------------------------------
338 * This function provides Denver specific
339 * register information for crash reporting.
340 * It needs to return with x6 pointing to
341 * a list of register names in ascii and
342 * x8 - x15 having values of registers to be
343 * reported.
344 * ---------------------------------------------
345 */
346.section .rodata.denver_regs, "aS"
347denver_regs: /* The ascii list of register names to be reported */
348 .asciz "actlr_el1", ""
349
350func denver_cpu_reg_dump
351 adr x6, denver_regs
352 mrs x8, ACTLR_EL1
353 ret
354endfunc denver_cpu_reg_dump
355
Varun Wadekarbc242fa2018-07-06 13:39:52 -0700356declare_cpu_ops_wa denver, DENVER_MIDR_PN0, \
Varun Wadekar3c337a62015-09-03 17:15:06 +0530357 denver_reset_func, \
Varun Wadekarbc242fa2018-07-06 13:39:52 -0700358 check_errata_cve_2017_5715, \
359 CPU_NO_EXTRA2_FUNC, \
Varun Wadekar3c337a62015-09-03 17:15:06 +0530360 denver_core_pwr_dwn, \
361 denver_cluster_pwr_dwn
362
Varun Wadekarbc242fa2018-07-06 13:39:52 -0700363declare_cpu_ops_wa denver, DENVER_MIDR_PN1, \
Varun Wadekar3c337a62015-09-03 17:15:06 +0530364 denver_reset_func, \
Varun Wadekarbc242fa2018-07-06 13:39:52 -0700365 check_errata_cve_2017_5715, \
366 CPU_NO_EXTRA2_FUNC, \
Varun Wadekar3c337a62015-09-03 17:15:06 +0530367 denver_core_pwr_dwn, \
368 denver_cluster_pwr_dwn
369
Varun Wadekarbc242fa2018-07-06 13:39:52 -0700370declare_cpu_ops_wa denver, DENVER_MIDR_PN2, \
Varun Wadekar3c337a62015-09-03 17:15:06 +0530371 denver_reset_func, \
Varun Wadekarbc242fa2018-07-06 13:39:52 -0700372 check_errata_cve_2017_5715, \
373 CPU_NO_EXTRA2_FUNC, \
Varun Wadekar3c337a62015-09-03 17:15:06 +0530374 denver_core_pwr_dwn, \
375 denver_cluster_pwr_dwn
376
Varun Wadekarbc242fa2018-07-06 13:39:52 -0700377declare_cpu_ops_wa denver, DENVER_MIDR_PN3, \
Varun Wadekar3c337a62015-09-03 17:15:06 +0530378 denver_reset_func, \
Varun Wadekarbc242fa2018-07-06 13:39:52 -0700379 check_errata_cve_2017_5715, \
380 CPU_NO_EXTRA2_FUNC, \
Varun Wadekar3c337a62015-09-03 17:15:06 +0530381 denver_core_pwr_dwn, \
382 denver_cluster_pwr_dwn
383
Varun Wadekarbc242fa2018-07-06 13:39:52 -0700384declare_cpu_ops_wa denver, DENVER_MIDR_PN4, \
Jeenu Viswambharanee5eb802016-11-18 12:58:28 +0000385 denver_reset_func, \
Varun Wadekarbc242fa2018-07-06 13:39:52 -0700386 check_errata_cve_2017_5715, \
387 CPU_NO_EXTRA2_FUNC, \
Jeenu Viswambharanee5eb802016-11-18 12:58:28 +0000388 denver_core_pwr_dwn, \
389 denver_cluster_pwr_dwn
Alex Van Brunt5f68fa72019-07-23 10:00:42 -0700390
391declare_cpu_ops_wa denver, DENVER_MIDR_PN5, \
392 denver_reset_func, \
393 check_errata_cve_2017_5715, \
394 CPU_NO_EXTRA2_FUNC, \
395 denver_core_pwr_dwn, \
396 denver_cluster_pwr_dwn
397
398declare_cpu_ops_wa denver, DENVER_MIDR_PN6, \
399 denver_reset_func, \
400 check_errata_cve_2017_5715, \
401 CPU_NO_EXTRA2_FUNC, \
402 denver_core_pwr_dwn, \
403 denver_cluster_pwr_dwn
404
405declare_cpu_ops_wa denver, DENVER_MIDR_PN7, \
406 denver_reset_func, \
407 check_errata_cve_2017_5715, \
408 CPU_NO_EXTRA2_FUNC, \
409 denver_core_pwr_dwn, \
410 denver_cluster_pwr_dwn
411
412declare_cpu_ops_wa denver, DENVER_MIDR_PN8, \
413 denver_reset_func, \
414 check_errata_cve_2017_5715, \
415 CPU_NO_EXTRA2_FUNC, \
416 denver_core_pwr_dwn, \
417 denver_cluster_pwr_dwn