/*
 * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a7.h>
#include <cpu_macros.S>

	/*
	 * Debug-build sanity check on the data-cache enable state.
	 * Reads SCTLR and tests the C bit; compiled out unless
	 * ENABLE_ASSERTIONS. Clobbers r0 and the condition flags.
	 *
	 * NOTE(review): `tst` sets Z (eq) when SCTLR.C is 0, so
	 * ASM_ASSERT(eq) passes when the data cache is *disabled* —
	 * the opposite of what the macro name suggests. Confirm the
	 * intended polarity against ASM_ASSERT in assert_macros.S.
	 */
	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
		ldcopr	r0, SCTLR
		tst	r0, #SCTLR_C_BIT
		ASM_ASSERT(eq)
#endif
	.endm
20
/*
 * void cortex_a7_disable_smp(void);
 *
 * Clear ACTLR.SMP, taking this core out of inter-core coherency.
 * The isb makes the ACTLR write take effect locally and the
 * dsb sy completes outstanding memory transactions before the
 * core stops participating in coherency.
 *
 * Clobbers: r0, flags. Returns via lr.
 */
func cortex_a7_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A7_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	dsb	sy
	bx	lr
endfunc cortex_a7_disable_smp
29
/*
 * void cortex_a7_enable_smp(void);
 *
 * Set ACTLR.SMP so this core participates in inter-core coherency.
 * Per the Cortex-A7 MPCore TRM, this bit must be set before the
 * caches and MMU are enabled, which is why the reset handler
 * routes here. The isb ensures the ACTLR write is visible to
 * subsequent instructions.
 *
 * Clobbers: r0, flags. Returns via lr.
 */
func cortex_a7_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A7_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a7_enable_smp
37
/*
 * Reset handler registered via declare_cpu_ops below.
 * The only Cortex-A7 reset-time setup needed here is joining the
 * coherency domain, so tail-call cortex_a7_enable_smp (it returns
 * to our caller directly via lr).
 */
func cortex_a7_reset_func
	b	cortex_a7_enable_smp
endfunc cortex_a7_reset_func
41
/*
 * Core power-down handler (registered via declare_cpu_ops).
 *
 * Clean+invalidate the L1 data cache by set/way, then leave the
 * cluster coherency domain. The flush must complete before
 * ACTLR.SMP is cleared, hence the ordering below.
 *
 * r12 is saved alongside lr only to keep the stack 8-byte aligned
 * across the call, per AAPCS32. Clobbers r0-r3, r12, flags.
 */
func cortex_a7_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency (tail-call; returns via restored lr) */
	pop	{r12, lr}
	b	cortex_a7_disable_smp
endfunc cortex_a7_core_pwr_dwn
55
/*
 * Cluster power-down handler (registered via declare_cpu_ops).
 *
 * Like the core handler, but additionally disables the ACP
 * (platform hook plat_disable_acp) and clean+invalidates the L2
 * cache before the cluster loses coherency. Ordering matters:
 * L1 flush, ACP off, L2 flush, then clear ACTLR.SMP.
 *
 * r12 is saved alongside lr only to keep the stack 8-byte aligned
 * across the calls, per AAPCS32. Clobbers r0-r3, r12, flags.
 */
func cortex_a7_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Platform hook: disable the Accelerator Coherency Port */
	bl	plat_disable_acp

	/* Flush L2 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* Exit cluster coherency (tail-call; returns via restored lr) */
	pop	{r12, lr}
	b	cortex_a7_disable_smp
endfunc cortex_a7_cluster_pwr_dwn
75
Govindraj Raja8fab3b32023-04-26 14:59:21 -050076errata_report_shim cortex_a7
Soby Mathew0980dce2018-09-17 04:34:35 +010077
Etienne Carrieref2f7b912017-11-05 22:56:34 +010078declare_cpu_ops cortex_a7, CORTEX_A7_MIDR, \
79 cortex_a7_reset_func, \
80 cortex_a7_core_pwr_dwn, \
81 cortex_a7_cluster_pwr_dwn