/*
 * Copyright (c) 2014-2015, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <platform_def.h>
#include <psci.h>

	.globl	psci_do_pwrdown_cache_maintenance
	.globl	psci_do_pwrup_cache_maintenance

/* -----------------------------------------------------------------------
 * void psci_do_pwrdown_cache_maintenance(uint32_t power_level);
 *
 * This function performs cache maintenance for the specified power
 * level. The levels of cache affected are determined by the power
 * level passed as the argument: level 0 results in a flush of the
 * L1 cache only, while both the L1 and L2 caches are flushed for any
 * higher power level.
 *
 * Additionally, this function ensures that stack memory is correctly
 * flushed out to avoid coherency issues due to a change in its memory
 * attributes after the data cache is disabled.
 * -----------------------------------------------------------------------
 */
func psci_do_pwrdown_cache_maintenance
	stp	x29, x30, [sp,#-16]!
	stp	x19, x20, [sp,#-16]!

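	/* Note: x19 is used below to preserve the stack
	 * top across the call to flush_dcache_range;
	 * x20 is not used and is saved alongside it
	 * only to keep the stack pointer 16-byte
	 * aligned.
	 */
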
	/* ---------------------------------------------
	 * Determine how many levels of cache will be
	 * subject to cache maintenance. Power level
	 * 0 implies that only the cpu is being powered
	 * down. Only the L1 data cache needs to be
	 * flushed to the PoU in this case. For a higher
	 * power level we are assuming that a flush
	 * of the L1 data and L2 unified caches is
	 * enough. This information should be provided
	 * by the platform.
	 * ---------------------------------------------
	 */
	cmp	x0, #MPIDR_AFFLVL0
	b.eq	do_core_pwr_dwn
	bl	prepare_cluster_pwr_dwn
	b	do_stack_maintenance

do_core_pwr_dwn:
	bl	prepare_core_pwr_dwn

	/* ---------------------------------------------
	 * Do stack maintenance by flushing the used
	 * stack to the main memory and invalidating the
	 * remainder.
	 * ---------------------------------------------
	 */
do_stack_maintenance:
	bl	plat_get_my_stack
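	/* The arithmetic below treats the value
	 * returned in x0 as the top (highest address)
	 * of this cpu's stack; a copy is kept in x19 so
	 * that it survives the call to
	 * flush_dcache_range.
	 */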

	/* ---------------------------------------------
	 * Calculate and store the size of the used
	 * stack memory in x1.
	 * ---------------------------------------------
	 */
	mov	x19, x0
	mov	x1, sp
	sub	x1, x0, x1
	mov	x0, sp
	bl	flush_dcache_range
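	/* The used stack, i.e. the region between the
	 * current stack pointer and the stack top, has
	 * now been flushed to main memory. x19 still
	 * holds the stack top for the invalidation of
	 * the unused region below.
	 */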

	/* ---------------------------------------------
	 * Calculate and store the size of the unused
	 * stack memory in x1. Calculate and store the
	 * stack base address in x0.
	 * ---------------------------------------------
	 */
	sub	x0, x19, #PLATFORM_STACK_SIZE
	sub	x1, sp, x0
	bl	inv_dcache_range
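	/* The unused region, from the stack base up to
	 * the current stack pointer, carries no live
	 * data, so it is invalidated rather than
	 * flushed.
	 */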

	ldp	x19, x20, [sp], #16
	ldp	x29, x30, [sp], #16
	ret
endfunc psci_do_pwrdown_cache_maintenance
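
/* -----------------------------------------------------------------------
 * Illustrative sketch only (not taken from this file): a caller on the
 * power-down path would be expected to run the function with the deepest
 * power level being turned off in x0 and with this cpu's stack already
 * set up, e.g.
 *
 *	mov	x0, #MPIDR_AFFLVL1	// example: cluster-level power down
 *	bl	psci_do_pwrdown_cache_maintenance
 *	// ... continue with the platform power-down sequence (hypothetical)
 *
 * The actual call sites live elsewhere in the PSCI implementation.
 * -----------------------------------------------------------------------
 */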


/* -----------------------------------------------------------------------
 * void psci_do_pwrup_cache_maintenance(void);
 *
 * This function performs cache maintenance after this cpu is powered up.
 * Currently, this involves managing the used stack memory before turning
 * on the data cache.
 * -----------------------------------------------------------------------
 */
func psci_do_pwrup_cache_maintenance
	stp	x29, x30, [sp,#-16]!

	/* ---------------------------------------------
	 * Ensure any inflight stack writes have made it
	 * to main memory.
	 * ---------------------------------------------
	 */
	dmb	st

	/* ---------------------------------------------
	 * Calculate and store the size of the used
	 * stack memory in x1, and the start of the used
	 * stack region (the current stack pointer) in
	 * x0.
	 * ---------------------------------------------
	 */
	bl	plat_get_my_stack
	mov	x1, sp
	sub	x1, x0, x1
	mov	x0, sp
	bl	inv_dcache_range
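	/* The used stack is invalidated rather than
	 * flushed here: main memory already holds the
	 * data written while the data cache was off,
	 * and stale lines must not be allowed to shadow
	 * it once the cache is enabled below.
	 */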

	/* ---------------------------------------------
	 * Enable the data cache.
	 * ---------------------------------------------
	 */
	mrs	x0, sctlr_el3
	orr	x0, x0, #SCTLR_C_BIT
	msr	sctlr_el3, x0
	isb
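	/* The read-modify-write of sctlr_el3 sets only
	 * the C bit; the isb ensures that subsequent
	 * instructions observe the data cache as
	 * enabled.
	 */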

	ldp	x29, x30, [sp], #16
	ret
endfunc psci_do_pwrup_cache_maintenance
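
/* -----------------------------------------------------------------------
 * Illustrative sketch only (not taken from this file): on the warm boot
 * path a caller would be expected to set up this cpu's stack and then run
 * the function before relying on cached stack accesses, e.g.
 *
 *	bl	plat_set_my_stack	// assumed platform stack set-up helper
 *	bl	psci_do_pwrup_cache_maintenance
 *
 * The actual call sites live elsewhere in the PSCI implementation.
 * -----------------------------------------------------------------------
 */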