/*
 * Copyright (c) 2016-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a15.h>
#include <cpu_macros.S>

/*
 * Cortex-A15 supports LPAE and the Virtualization Extensions.
 * We do not care whether the configuration uses LPAE and/or VE,
 * so we do not check ARCH_IS_ARMV7_WITH_LPAE/VE here.
 */

	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif
	.endm

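	/* ----------------------------------------------------
	 * Take this core out of intra-cluster coherency by
	 * clearing the SMP bit in ACTLR.
	 * Clobbers: r0
	 * ----------------------------------------------------
	 */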
func cortex_a15_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
#if ERRATA_A15_816470
	/*
	 * Invalidate any TLB address
	 */
	mov	r0, #0
	stcopr	r0, TLBIMVA
#endif
	dsb	sy
	bx	lr
endfunc cortex_a15_disable_smp

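	/* ----------------------------------------------------
	 * Enable intra-cluster coherency by setting the SMP bit
	 * in ACTLR.
	 * Clobbers: r0
	 * ----------------------------------------------------
	 */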
func cortex_a15_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a15_enable_smp

	/* ----------------------------------------------------
	 * Errata Workaround for Cortex A15 Errata #816470.
	 * This applies only to revision >= r3p0 of Cortex A15.
	 * ----------------------------------------------------
	 */
func check_errata_816470
	/*
	 * Even though this is only needed for revision >= r3p0, it is always
	 * applied because of the low cost of the workaround.
	 */
	mov	r0, #ERRATA_APPLIES
	bx	lr
endfunc check_errata_816470

	/* ----------------------------------------------------
	 * Errata Workaround for Cortex A15 Errata #827671.
	 * This applies only to revision >= r3p0 of Cortex A15.
	 * Inputs:
	 * r0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: r0-r3
	 * ----------------------------------------------------
	 */
func errata_a15_827671_wa
	/*
	 * Compare r0 against revision r3p0
	 */
	mov	r2, lr
	bl	check_errata_827671
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A15_ACTLR2
	orr	r0, #CORTEX_A15_ACTLR2_INV_DCC_BIT
	stcopr	r0, CORTEX_A15_ACTLR2
	isb
1:
	bx	r2
endfunc errata_a15_827671_wa

func check_errata_827671
	mov	r1, #0x30
	b	cpu_rev_var_hs
endfunc check_errata_827671

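	/* ----------------------------------------------------
	 * Report whether the CVE-2017-5715 (Spectre v2)
	 * workaround is built into this image.
	 * ----------------------------------------------------
	 */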
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715

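	/* ----------------------------------------------------
	 * Report whether the CVE-2022-23960 (Spectre-BHB)
	 * workaround is built into this image.
	 * ----------------------------------------------------
	 */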
func check_errata_cve_2022_23960
#if WORKAROUND_CVE_2022_23960
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2022_23960

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A15. Must follow AAPCS.
 */
func cortex_a15_errata_report
	push	{r12, lr}

	bl	cpu_get_rev_var
	mov	r4, r0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata ERRATA_A15_816470, cortex_a15, 816470
	report_errata ERRATA_A15_827671, cortex_a15, 827671
	report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715
	report_errata WORKAROUND_CVE_2022_23960, cortex_a15, cve_2022_23960

	pop	{r12, lr}
	bx	lr
endfunc cortex_a15_errata_report
#endif

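	/* ----------------------------------------------------
	 * Reset handler: apply the errata workarounds and CVE
	 * mitigations selected at build time, then enable SMP
	 * coherency.
	 * ----------------------------------------------------
	 */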
func cortex_a15_reset_func
	mov	r5, lr
	bl	cpu_get_rev_var

#if ERRATA_A15_827671
	bl	errata_a15_827671_wa
#endif

#if IMAGE_BL32 && (WORKAROUND_CVE_2017_5715 || WORKAROUND_CVE_2022_23960)
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
	stcopr	r0, ACTLR
	ldr	r0, =wa_cve_2017_5715_icache_inv_vbar
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif

	mov	lr, r5
	b	cortex_a15_enable_smp
endfunc cortex_a15_reset_func

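	/* ----------------------------------------------------
	 * Power down sequence for a single core: flush the L1
	 * data cache, then exit cluster coherency.
	 * ----------------------------------------------------
	 */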
func cortex_a15_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp
endfunc cortex_a15_core_pwr_dwn

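	/* ----------------------------------------------------
	 * Power down sequence for the cluster: flush the L1
	 * data cache, disable the ACP, then exit cluster
	 * coherency.
	 * ----------------------------------------------------
	 */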
func cortex_a15_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	bl	plat_disable_acp

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp
endfunc cortex_a15_cluster_pwr_dwn

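/* Register the handlers above as the Cortex-A15 CPU operations */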
declare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
	cortex_a15_reset_func, \
	cortex_a15_core_pwr_dwn, \
	cortex_a15_cluster_pwr_dwn