/*
 * Copyright (c) 2017-2019, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a17.h>
#include <cpu_macros.S>

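	/*
	 * Check the state of the SCTLR.C (data cache enable) bit before the
	 * cache maintenance performed by the power-down handlers below.
	 * Only compiled in when ENABLE_ASSERTIONS is set.
	 */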
	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
		ldcopr	r0, SCTLR
		tst	r0, #SCTLR_C_BIT
		ASM_ASSERT(eq)
#endif
	.endm

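	/*
	 * Take the core out of intra-cluster coherency by clearing the SMP
	 * bit in ACTLR, then synchronise.
	 */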
func cortex_a17_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A17_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	dsb	sy
	bx	lr
endfunc cortex_a17_disable_smp

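	/*
	 * Join intra-cluster coherency by setting the SMP bit in ACTLR.
	 */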
func cortex_a17_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A17_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a17_enable_smp

	/* ----------------------------------------------------
	 * Errata Workaround for Cortex A17 Errata #852421.
	 * This applies only to revision <= r1p2 of Cortex A17.
	 * Inputs:
	 * r0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: r0-r3
	 * ----------------------------------------------------
	 */
func errata_a17_852421_wa
	/*
	 * Compare r0 against revision r1p2
	 */
	mov	r2, lr
	bl	check_errata_852421
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A17_IMP_DEF_REG1
	orr	r0, r0, #(1<<24)
	stcopr	r0, CORTEX_A17_IMP_DEF_REG1
1:
	bx	r2
endfunc errata_a17_852421_wa

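	/*
	 * Return ERRATA_APPLIES when the revision-variant value in r0 is
	 * less than or equal to 0x12 (r1p2), ERRATA_NOT_APPLIES otherwise.
	 */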
func check_errata_852421
	mov	r1, #0x12
	b	cpu_rev_var_ls
endfunc check_errata_852421

	/* ----------------------------------------------------
	 * Errata Workaround for Cortex A17 Errata #852423.
	 * This applies only to revision <= r1p2 of Cortex A17.
	 * Inputs:
	 * r0: variant[4:7] and revision[0:3] of current cpu.
	 * Shall clobber: r0-r3
	 * ----------------------------------------------------
	 */
func errata_a17_852423_wa
	/*
	 * Compare r0 against revision r1p2
	 */
	mov	r2, lr
	bl	check_errata_852423
	cmp	r0, #ERRATA_NOT_APPLIES
	beq	1f
	ldcopr	r0, CORTEX_A17_IMP_DEF_REG1
	orr	r0, r0, #(1<<12)
	stcopr	r0, CORTEX_A17_IMP_DEF_REG1
1:
	bx	r2
endfunc errata_a17_852423_wa

func check_errata_852423
	mov	r1, #0x12
	b	cpu_rev_var_ls
endfunc check_errata_852423

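	/*
	 * Report whether this build carries the CVE-2017-5715 (branch
	 * predictor invalidation) workaround.
	 */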
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A17. Must follow AAPCS.
 */
func cortex_a17_errata_report
	push	{r12, lr}

	bl	cpu_get_rev_var
	mov	r4, r0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata ERRATA_A17_852421, cortex_a17, 852421
	report_errata ERRATA_A17_852423, cortex_a17, 852423
	report_errata WORKAROUND_CVE_2017_5715, cortex_a17, cve_2017_5715

	pop	{r12, lr}
	bx	lr
endfunc cortex_a17_errata_report
#endif

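	/*
	 * Reset handler: apply the 852421/852423 errata workarounds when the
	 * corresponding build flags are set, install the CVE-2017-5715
	 * exception vector workaround for BL32 builds, then join coherency
	 * via cortex_a17_enable_smp.
	 */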
func cortex_a17_reset_func
	mov	r5, lr
	bl	cpu_get_rev_var
	mov	r4, r0

#if ERRATA_A17_852421
	mov	r0, r4
	bl	errata_a17_852421_wa
#endif

#if ERRATA_A17_852423
	mov	r0, r4
	bl	errata_a17_852423_wa
#endif

#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
	ldr	r0, =workaround_bpiall_runtime_exceptions
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif

	mov	lr, r5
	b	cortex_a17_enable_smp
endfunc cortex_a17_reset_func

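	/*
	 * Per-core power-down: flush the L1 data cache by set/way, then take
	 * the core out of coherency by clearing the ACTLR SMP bit.
	 */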
func cortex_a17_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a17_disable_smp
endfunc cortex_a17_core_pwr_dwn

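	/*
	 * Cluster power-down: same as the per-core sequence, but also lets
	 * the platform disable the ACP before leaving coherency.
	 */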
func cortex_a17_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	bl	plat_disable_acp

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a17_disable_smp
endfunc cortex_a17_cluster_pwr_dwn

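	/* Register the handlers above for cores reporting CORTEX_A17_MIDR */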
declare_cpu_ops cortex_a17, CORTEX_A17_MIDR, \
	cortex_a17_reset_func, \
	cortex_a17_core_pwr_dwn, \
	cortex_a17_cluster_pwr_dwn