/*
 * Copyright (c) 2013-2016, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef __CM_H__
#define __CM_H__

#include <arch.h>
#include <assert.h>
#include <stdint.h>

/*******************************************************************************
 * Forward declarations
 ******************************************************************************/
struct entry_point_info;

/*******************************************************************************
 * Function & variable prototypes
 ******************************************************************************/
void cm_init(void);
void *cm_get_context_by_index(unsigned int cpu_idx,
                              unsigned int security_state);
void cm_set_context_by_index(unsigned int cpu_idx,
                             void *context,
                             unsigned int security_state);
void *cm_get_context(uint32_t security_state);
void cm_set_context(void *context, uint32_t security_state);
void cm_init_my_context(const struct entry_point_info *ep);
void cm_init_context_by_index(unsigned int cpu_idx,
                              const struct entry_point_info *ep);
void cm_prepare_el3_exit(uint32_t security_state);

#ifndef AARCH32
void cm_el1_sysregs_context_save(uint32_t security_state);
void cm_el1_sysregs_context_restore(uint32_t security_state);
void cm_set_elr_el3(uint32_t security_state, uintptr_t entrypoint);
void cm_set_elr_spsr_el3(uint32_t security_state,
                         uintptr_t entrypoint, uint32_t spsr);
void cm_write_scr_el3_bit(uint32_t security_state,
                          uint32_t bit_pos,
                          uint32_t value);
void cm_set_next_eret_context(uint32_t security_state);
uint32_t cm_get_scr_el3(uint32_t security_state);


/* MPIDR-based variants of the context accessors and initialisers above */
void cm_init_context(uint64_t mpidr,
                     const struct entry_point_info *ep) __deprecated;

void *cm_get_context_by_mpidr(uint64_t mpidr,
                              uint32_t security_state) __deprecated;
void cm_set_context_by_mpidr(uint64_t mpidr,
                             void *context,
                             uint32_t security_state) __deprecated;

/* Inline definitions */

/*******************************************************************************
 * This function is used to program the context that is used for exception
 * return. It initializes SP_EL3 to point to the 'cpu_context' set up for the
 * required security state.
 ******************************************************************************/
static inline void cm_set_next_context(void *context)
{
#if DEBUG
        uint64_t sp_mode;

        /*
         * Check that this function is called with SP_EL0 as the stack
         * pointer
         */
        __asm__ volatile("mrs %0, SPSel\n"
                         : "=r" (sp_mode));

        assert(sp_mode == MODE_SP_EL0);
#endif

        /*
         * Select SP_ELx, load it with the new context pointer and then
         * switch back to SP_EL0.
         */
        __asm__ volatile("msr spsel, #1\n"
                         "mov sp, %0\n"
                         "msr spsel, #0\n"
                         : : "r" (context));
}
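
/*
 * Illustrative sketch only, not part of the original header: it shows the
 * typical pattern of fetching the calling CPU's 'cpu_context' for a target
 * security state and programming it as the next exception return context
 * before an ERET from EL3. The helper name below is hypothetical; the
 * library's own cm_set_next_eret_context() declared above is the interface
 * intended for this purpose.
 */
static inline void cm_example_prepare_eret(uint32_t security_state)
{
        /* Look up the context previously registered for this security state */
        void *ctx = cm_get_context(security_state);

        /* A context must have been set before it can be used for ERET */
        assert(ctx);

        /* Program SP_EL3 so the exception return uses this context */
        cm_set_next_context(ctx);
}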

#else
void *cm_get_next_context(void);
#endif /* AARCH32 */

#endif /* __CM_H__ */