/*
 * Copyright (c) 2014-2015, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <platform_def.h>
#include <psci.h>

	.globl	psci_do_pwrdown_cache_maintenance
	.globl	psci_do_pwrup_cache_maintenance

/* -----------------------------------------------------------------------
 * void psci_do_pwrdown_cache_maintenance(uint32_t power_level);
 *
 * This function performs cache maintenance for the specified power
 * level. The levels of cache affected are determined by the power
 * level which is passed as the argument, i.e. level 0 results
 * in a flush of the L1 cache. Both the L1 and L2 caches are flushed
 * for a higher power level.
 *
 * Additionally, this function also ensures that stack memory is correctly
 * flushed out to avoid coherency issues due to a change in its memory
 * attributes after the data cache is disabled.
 * -----------------------------------------------------------------------
 */
func psci_do_pwrdown_cache_maintenance
	/*
	 * In:     x0 = power level (MPIDR_AFFLVL0 => core-only power down)
	 * Out:    none
	 * Note:   x19 is used to keep the stack top live across calls;
	 *         x20 is saved/restored only so sp stays 16-byte aligned.
	 */
	stp	x29, x30, [sp,#-16]!
	stp	x19, x20, [sp,#-16]!

	/* ---------------------------------------------
	 * Determine how many levels of cache will be
	 * subject to cache maintenance. Power level
	 * 0 implies that only the cpu is being powered
	 * down. Only the L1 data cache needs to be
	 * flushed to the PoU in this case. For a higher
	 * power level we are assuming that a flush
	 * of L1 data and L2 unified cache is enough.
	 * This information should be provided by the
	 * platform (prepare_core_pwr_dwn /
	 * prepare_cluster_pwr_dwn handlers).
	 * ---------------------------------------------
	 */
	cmp	x0, #MPIDR_AFFLVL0
	b.eq	do_core_pwr_dwn
	bl	prepare_cluster_pwr_dwn		/* level > 0: cpu + cluster caches */
	b	do_stack_maintenance

do_core_pwr_dwn:
	bl	prepare_core_pwr_dwn		/* level 0: cpu caches only */

	/* ---------------------------------------------
	 * Do stack maintenance by flushing the used
	 * stack to the main memory and invalidating the
	 * remainder.
	 * ---------------------------------------------
	 */
do_stack_maintenance:
	mrs	x0, mpidr_el1
	bl	platform_get_stack		/* x0 = this cpu's stack top (used as
						 * upper bound of the flush below) */

	/* ---------------------------------------------
	 * Calculate and store the size of the used
	 * stack memory in x1.
	 * ---------------------------------------------
	 */
	mov	x19, x0				/* stash stack top across the two calls */
	mov	x1, sp
	sub	x1, x0, x1			/* x1 = used size = stack top - sp */
	mov	x0, sp				/* x0 = lowest used address */
	bl	flush_dcache_range

	/* ---------------------------------------------
	 * Calculate and store the size of the unused
	 * stack memory in x1. Calculate and store the
	 * stack base address in x0.
	 * ---------------------------------------------
	 */
	sub	x0, x19, #PLATFORM_STACK_SIZE	/* x0 = stack base (lowest address) */
	sub	x1, sp, x0			/* x1 = unused size = sp - base */
	bl	inv_dcache_range

	ldp	x19, x20, [sp], #16
	ldp	x29, x30, [sp], #16
	ret
endfunc psci_do_pwrdown_cache_maintenance


/* -----------------------------------------------------------------------
 * void psci_do_pwrup_cache_maintenance(void);
 *
 * This function performs cache maintenance after this cpu is powered up.
 * Currently, this involves managing the used stack memory before turning
 * on the data cache.
 * -----------------------------------------------------------------------
 */
func psci_do_pwrup_cache_maintenance
	/*
	 * In:     none
	 * Out:    none
	 * Effect: invalidates this cpu's used stack range, then sets
	 *         SCTLR_EL3.C to enable the data cache.
	 */
	stp	x29, x30, [sp,#-16]!

	/* ---------------------------------------------
	 * Ensure any inflight stack writes have made it
	 * to main memory.
	 * ---------------------------------------------
	 */
	dmb	st

	/* ---------------------------------------------
	 * Calculate and store the size of the used
	 * stack memory in x1, and the lowest used stack
	 * address (the current sp) in x0. Invalidating
	 * this range discards any stale cache lines
	 * before the data cache is re-enabled below.
	 * ---------------------------------------------
	 */
	mrs	x0, mpidr_el1
	bl	platform_get_stack		/* x0 = this cpu's stack top */
	mov	x1, sp
	sub	x1, x0, x1			/* x1 = used size = stack top - sp */
	mov	x0, sp				/* x0 = lowest used address */
	bl	inv_dcache_range

	/* ---------------------------------------------
	 * Enable the data cache. The dmb above ensures
	 * the stack writes are visible before caching
	 * is turned back on.
	 * ---------------------------------------------
	 */
	mrs	x0, sctlr_el3
	orr	x0, x0, #SCTLR_C_BIT
	msr	sctlr_el3, x0
	isb					/* make the new SCTLR_EL3 take effect */

	ldp	x29, x30, [sp], #16
	ret
endfunc psci_do_pwrup_cache_maintenance