/*
 * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a15.h>
#include <cpu_macros.S>

/*
 * The Cortex-A15 supports LPAE and the Virtualization Extensions,
 * regardless of whether the platform configuration makes use of them.
 * Therefore, ARCH_IS_ARMV7_WITH_LPAE/VE is not checked here.
 */

19 .macro assert_cache_enabled
20#if ENABLE_ASSERTIONS
21 ldcopr r0, SCTLR
22 tst r0, #SCTLR_C_BIT
23 ASM_ASSERT(eq)
24#endif
25 .endm
26
/*
 * Take this core out of inter-core coherency by clearing ACTLR.SMP.
 * Clobbers r0. Returns via lr.
 */
func cortex_a15_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
#if ERRATA_A15_816470
	/*
	 * Erratum 816470 workaround: issue a dummy TLB invalidate by MVA
	 * (any address) ahead of the DSB below. See the Cortex-A15
	 * software errata notice for the rationale.
	 */
	mov	r0, #0
	stcopr	r0, TLBIMVA
#endif
	dsb	sy
	bx	lr
endfunc cortex_a15_disable_smp

/*
 * Bring this core into inter-core coherency by setting ACTLR.SMP.
 * Clobbers r0. Returns via lr.
 */
func cortex_a15_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a15_enable_smp

Ambroise Vincentd4a51eb2019-03-04 16:56:26 +000051 /* ----------------------------------------------------
52 * Errata Workaround for Cortex A15 Errata #816470.
53 * This applies only to revision >= r3p0 of Cortex A15.
54 * ----------------------------------------------------
55 */
56func check_errata_816470
57 /*
58 * Even though this is only needed for revision >= r3p0, it is always
59 * applied because of the low cost of the workaround.
60 */
61 mov r0, #ERRATA_APPLIES
62 bx lr
63endfunc check_errata_816470
64
Ambroise Vincent68b38122019-03-05 09:54:21 +000065 /* ----------------------------------------------------
66 * Errata Workaround for Cortex A15 Errata #827671.
67 * This applies only to revision >= r3p0 of Cortex A15.
68 * Inputs:
69 * r0: variant[4:7] and revision[0:3] of current cpu.
70 * Shall clobber: r0-r3
71 * ----------------------------------------------------
72 */
73func errata_a15_827671_wa
74 /*
75 * Compare r0 against revision r3p0
76 */
77 mov r2, lr
78 bl check_errata_827671
79 cmp r0, #ERRATA_NOT_APPLIES
80 beq 1f
81 ldcopr r0, CORTEX_A15_ACTLR2
82 orr r0, #CORTEX_A15_ACTLR2_INV_DCC_BIT
83 stcopr r0, CORTEX_A15_ACTLR2
84 isb
851:
86 bx r2
87endfunc errata_a15_827671_wa
88
/*
 * Erratum 827671 applies from r3p0 upwards: encode r3p0 as 0x30
 * (variant in [7:4], revision in [3:0]) and tail-call the generic
 * "higher-or-same" revision comparator, which returns the verdict
 * in r0 directly to our caller.
 */
func check_errata_827671
	mov	r1, #0x30
	b	cpu_rev_var_hs
endfunc check_errata_827671

Dimitris Papastamos8ca0af22018-01-03 10:48:59 +000094func check_errata_cve_2017_5715
95#if WORKAROUND_CVE_2017_5715
96 mov r0, #ERRATA_APPLIES
97#else
98 mov r0, #ERRATA_MISSING
99#endif
100 bx lr
101endfunc check_errata_cve_2017_5715
102
#if REPORT_ERRATA
/*
 * Errata printing function for Cortex-A15. Must follow AAPCS.
 */
func cortex_a15_errata_report
	/*
	 * r4 is callee-saved under AAPCS but is used below to keep the
	 * revision-variant live across the report_errata expansions, so
	 * it must be preserved (the original saved only {r12, lr} and
	 * silently clobbered r4). Pushing {r4, lr} keeps the 8-byte
	 * stack alignment; r12/ip is caller-saved and needs no saving.
	 */
	push	{r4, lr}

	bl	cpu_get_rev_var
	mov	r4, r0

	/*
	 * Report all errata. The revision-variant information is passed
	 * to the checking functions of each erratum.
	 */
	report_errata ERRATA_A15_816470, cortex_a15, 816470
	report_errata ERRATA_A15_827671, cortex_a15, 827671
	report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715

	pop	{r4, lr}
	bx	lr
endfunc cortex_a15_errata_report
#endif

Etienne Carriere4ece7552017-11-05 22:56:10 +0100126func cortex_a15_reset_func
Ambroise Vincent68b38122019-03-05 09:54:21 +0000127 mov r5, lr
128 bl cpu_get_rev_var
129
130#if ERRATA_A15_827671
131 bl errata_a15_827671_wa
132#endif
133
Dimitris Papastamos8ca0af22018-01-03 10:48:59 +0000134#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
135 ldcopr r0, ACTLR
136 orr r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
137 stcopr r0, ACTLR
138 ldr r0, =workaround_icache_inv_runtime_exceptions
139 stcopr r0, VBAR
140 stcopr r0, MVBAR
141 /* isb will be applied in the course of the reset func */
142#endif
Ambroise Vincent68b38122019-03-05 09:54:21 +0000143
144 mov lr, r5
Etienne Carriere4ece7552017-11-05 22:56:10 +0100145 b cortex_a15_enable_smp
146endfunc cortex_a15_reset_func
147
/*
 * Power-down preparation for a single core: flush the local L1 cache
 * and then leave coherency (via tail call to disable_smp).
 */
func cortex_a15_core_pwr_dwn
	push	{r12, lr}

	/* Sanity-check the cache state (see assert_cache_enabled) */
	assert_cache_enabled

	/* Clean and invalidate the L1 data cache by set/way */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency; disable_smp returns to our caller */
	pop	{r12, lr}
	b	cortex_a15_disable_smp
endfunc cortex_a15_core_pwr_dwn

/*
 * Power-down preparation for the whole cluster: flush the local L1
 * cache, let the platform quiesce the ACP, then leave coherency.
 */
func cortex_a15_cluster_pwr_dwn
	push	{r12, lr}

	/* Sanity-check the cache state (see assert_cache_enabled) */
	assert_cache_enabled

	/* Clean and invalidate the L1 data cache by set/way */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Platform hook to disable the Accelerator Coherency Port */
	bl	plat_disable_acp

	/* Exit cluster coherency; disable_smp returns to our caller */
	pop	{r12, lr}
	b	cortex_a15_disable_smp
endfunc cortex_a15_cluster_pwr_dwn

/* Register the Cortex-A15 handlers with the generic CPU framework */
declare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
	cortex_a15_reset_func, \
	cortex_a15_core_pwr_dwn, \
	cortex_a15_cluster_pwr_dwn