blob: b6c61ab7f57a4d7a5d94f6d0a8640bb8b3587d5b [file] [log] [blame]
/*
 * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
6
7#include <arch.h>
8#include <asm_macros.S>
9#include <assert_macros.S>
10#include <cortex_a15.h>
11#include <cpu_macros.S>
12
/*
 * The Cortex-A15 supports LPAE and the Virtualization Extensions,
 * regardless of whether the build configuration makes use of them.
 * Therefore, ARCH_IS_ARMV7_WITH_LPAE/VE is not checked here.
 */
18
	/*
	 * Assert helper for the power-down paths below.
	 *
	 * Reads SCTLR and evaluates SCTLR.C (data cache enable) with
	 * ASM_ASSERT. Expands to nothing unless ENABLE_ASSERTIONS=1.
	 * Clobbers r0 and the condition flags.
	 *
	 * NOTE(review): `tst` sets Z (eq) when SCTLR.C is 0, so
	 * ASM_ASSERT(eq) passes when the cache is DISABLED — the
	 * opposite of what the macro name suggests. Disabled caches
	 * are what the set/way flush below requires, so the check
	 * looks intentional and only the name misleading — confirm
	 * against assert_macros.S.
	 */
	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif
	.endm
26
/*
 * void cortex_a15_disable_smp(void)
 *
 * Take this core out of inter-core coherency by clearing the SMP bit
 * in ACTLR. The isb makes the ACTLR write take effect before the
 * following instructions; the dsb completes outstanding memory
 * accesses before returning. Clobbers r0.
 */
func cortex_a15_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A15_ACTLR_SMP_BIT	/* leave the coherency domain */
	stcopr	r0, ACTLR
	isb
	dsb	sy
	bx	lr
endfunc cortex_a15_disable_smp
35
/*
 * void cortex_a15_enable_smp(void)
 *
 * Enter inter-core coherency by setting the SMP bit in ACTLR.
 * The isb ensures the ACTLR write is visible to subsequent
 * instructions. Clobbers r0.
 */
func cortex_a15_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_SMP_BIT	/* join the coherency domain */
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a15_enable_smp
43
/*
 * int check_errata_cve_2017_5715(void)
 *
 * Report whether the CVE-2017-5715 (branch-target-injection)
 * mitigation is compiled into this image: returns ERRATA_APPLIES
 * in r0 when built with WORKAROUND_CVE_2017_5715=1, otherwise
 * ERRATA_MISSING. Resolved entirely at build time.
 */
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715
52
#if REPORT_ERRATA
/*
 * void cortex_a15_errata_report(void)
 *
 * Errata printing function for Cortex-A15. Must follow AAPCS.
 * Fetches the CPU revision-variant via cpu_get_rev_var and hands
 * it to each erratum's check function through the report_errata
 * macro (which expects it in r4).
 */
func cortex_a15_errata_report
	/*
	 * r4 is callee-saved under AAPCS but the report_errata macro
	 * uses it to carry the revision-variant across calls, so it
	 * must be preserved here (the original code clobbered it).
	 * r5 is saved as well purely to keep the stack 8-byte aligned
	 * at the call sites below, as AAPCS requires.
	 */
	push	{r4, r5, r12, lr}

	bl	cpu_get_rev_var
	mov	r4, r0			/* r4 = revision-variant, live across report_errata */

	/*
	 * Report all errata. The revision-variant information is passed to
	 * checking functions of each errata.
	 */
	report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715

	pop	{r4, r5, r12, lr}
	bx	lr
endfunc cortex_a15_errata_report
#endif
73
/*
 * Reset handler for Cortex-A15.
 *
 * When the CVE-2017-5715 mitigation is built into BL32, configure the
 * mitigation before anything else, then fall through to enabling SMP
 * coherency. Clobbers r0.
 */
func cortex_a15_reset_func
#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
	/*
	 * Set the INV_BTB bit in ACTLR (per the bit's name this makes
	 * the core invalidate its branch target buffer — confirm
	 * against the A15 TRM) and route both the regular (VBAR) and
	 * monitor (MVBAR) exception vectors to the workaround vector
	 * table that invalidates the I-cache on exception entry.
	 */
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
	stcopr	r0, ACTLR
	ldr	r0, =workaround_icache_inv_runtime_exceptions
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif
	/* Tail call: cortex_a15_enable_smp returns to our caller. */
	b	cortex_a15_enable_smp
endfunc cortex_a15_reset_func
86
/*
 * Power-down sequence for a single Cortex-A15 core.
 *
 * Checks the data-cache state (assert_cache_enabled, build-time
 * optional), cleans and invalidates the L1 data cache by set/way
 * (DC_OP_CISW via dcsw_op_level1), then exits cluster coherency.
 * The final branch is a tail call: cortex_a15_disable_smp performs
 * the return with the restored lr.
 */
func cortex_a15_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp
endfunc cortex_a15_core_pwr_dwn
100
/*
 * Power-down sequence for a Cortex-A15 cluster (executed on the
 * last core standing).
 *
 * Same as the per-core path — cache-state assertion, L1
 * clean+invalidate by set/way — plus a call to the platform hook
 * plat_disable_acp to disable the Accelerator Coherency Port before
 * leaving coherency. Tail-calls cortex_a15_disable_smp, which
 * performs the return with the restored lr.
 */
func cortex_a15_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	bl	plat_disable_acp

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a15_disable_smp
endfunc cortex_a15_cluster_pwr_dwn
116
/*
 * Register the Cortex-A15 handlers (reset, core power-down, cluster
 * power-down) with the generic CPU operations framework, keyed by
 * the CORTEX_A15_MIDR value.
 */
declare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
	cortex_a15_reset_func, \
	cortex_a15_core_pwr_dwn, \
	cortex_a15_cluster_pwr_dwn