/*
 * Copyright (c) 2020, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <assert.h>
#include <errno.h>
#include <string.h>

#include <arch_helpers.h>
#include <arch/aarch64/arch_features.h>
#include <bl31/bl31.h>
#include <common/debug.h>
#include <common/runtime_svc.h>
#include <lib/el3_runtime/context_mgmt.h>
#include <lib/smccc.h>
#include <lib/spinlock.h>
#include <lib/utils.h>
#include <plat/common/common_def.h>
#include <plat/common/platform.h>
#include <platform_def.h>
#include <services/ffa_svc.h>
#include <services/spmd_svc.h>
#include <smccc_helpers.h>
#include "spmd_private.h"

/*******************************************************************************
 * SPM Core context information.
 ******************************************************************************/
static spmd_spm_core_context_t spm_core_context[PLATFORM_CORE_COUNT];

/*******************************************************************************
 * SPM Core attribute information read from its manifest.
 ******************************************************************************/
static spmc_manifest_attribute_t spmc_attrs;

/*******************************************************************************
 * SPM Core entry point information. Discovered on the primary core and reused
 * on secondary cores.
 ******************************************************************************/
static entry_point_info_t *spmc_ep_info;

/*******************************************************************************
 * SPM Core context on current CPU get helper.
 ******************************************************************************/
spmd_spm_core_context_t *spmd_get_context(void)
{
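	/* The platform linear core id indexes the per-CPU SPMC context. */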
	unsigned int linear_id = plat_my_core_pos();

	return &spm_core_context[linear_id];
}

/*******************************************************************************
 * SPM Core entry point information get helper.
 ******************************************************************************/
entry_point_info_t *spmd_spmc_ep_info_get(void)
{
	return spmc_ep_info;
}

/*******************************************************************************
 * Static function declarations.
 ******************************************************************************/
static int32_t spmd_init(void);
static int spmd_spmc_init(void *pm_addr);
static uint64_t spmd_ffa_error_return(void *handle,
				      int error_code);
static uint64_t spmd_smc_forward(uint32_t smc_fid,
				 bool secure_origin,
				 uint64_t x1,
				 uint64_t x2,
				 uint64_t x3,
				 uint64_t x4,
				 void *handle);

/*******************************************************************************
 * This function takes an SPMC context pointer and performs a synchronous
 * SPMC entry.
 ******************************************************************************/
uint64_t spmd_spm_core_sync_entry(spmd_spm_core_context_t *spmc_ctx)
{
	uint64_t rc;

	assert(spmc_ctx != NULL);

	cm_set_context(&(spmc_ctx->cpu_ctx), SECURE);

	/* Restore the context assigned above */
	cm_el1_sysregs_context_restore(SECURE);
#if SPMD_SPM_AT_SEL2
	cm_el2_sysregs_context_restore(SECURE);
#endif
	cm_set_next_eret_context(SECURE);

	/* Enter SPMC */
	rc = spmd_spm_core_enter(&spmc_ctx->c_rt_ctx);
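	/*
	 * Execution resumes here when the SPMC hands control back through
	 * spmd_spm_core_sync_exit(), which is expected to restore the C
	 * runtime context saved in c_rt_ctx by the spmd_spm_core_enter()
	 * assembly helper.
	 */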

	/* Save secure state */
	cm_el1_sysregs_context_save(SECURE);
#if SPMD_SPM_AT_SEL2
	cm_el2_sysregs_context_save(SECURE);
#endif

	return rc;
}

/*******************************************************************************
 * This function returns to the place where spmd_spm_core_sync_entry() was
 * called originally.
 ******************************************************************************/
__dead2 void spmd_spm_core_sync_exit(uint64_t rc)
{
	spmd_spm_core_context_t *ctx = spmd_get_context();

	/* Get current CPU context from SPMC context */
	assert(cm_get_context(SECURE) == &(ctx->cpu_ctx));

	/*
	 * The SPMD must have initiated the original request through a
	 * synchronous entry into SPMC. Jump back to the original C runtime
	 * context with the value of rc in x0.
	 */
	spmd_spm_core_exit(ctx->c_rt_ctx, rc);

	panic();
}

/*******************************************************************************
 * Jump to the SPM Core for the first time.
 ******************************************************************************/
static int32_t spmd_init(void)
{
	spmd_spm_core_context_t *ctx = spmd_get_context();
	uint64_t rc;

	VERBOSE("SPM Core init start.\n");
	ctx->state = SPMC_STATE_RESET;

	rc = spmd_spm_core_sync_entry(ctx);
	if (rc != 0ULL) {
		ERROR("SPMC initialisation failed 0x%llx\n", rc);
		return 0;
	}

	ctx->state = SPMC_STATE_IDLE;
	VERBOSE("SPM Core init end.\n");

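	/*
	 * Note: the registered BL32 init hook is assumed to signal success
	 * with a non-zero return value, hence the 1/0 convention used here.
	 */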
	return 1;
}

/*******************************************************************************
 * Load the SPM Core manifest and initialise the SPM Core context.
 ******************************************************************************/
static int spmd_spmc_init(void *pm_addr)
{
	spmd_spm_core_context_t *spm_ctx = spmd_get_context();
	uint32_t ep_attr;
	int rc;

	/* Load the SPM Core manifest */
	rc = plat_spm_core_manifest_load(&spmc_attrs, pm_addr);
	if (rc != 0) {
		WARN("No or invalid SPM Core manifest image provided by BL2\n");
		return rc;
	}

	/*
	 * Ensure that the SPM Core version is compatible with the SPM
	 * Dispatcher version.
	 */
	if ((spmc_attrs.major_version != FFA_VERSION_MAJOR) ||
	    (spmc_attrs.minor_version > FFA_VERSION_MINOR)) {
		WARN("Unsupported FFA version (%u.%u)\n",
		     spmc_attrs.major_version, spmc_attrs.minor_version);
		return -EINVAL;
	}

	VERBOSE("FFA version (%u.%u)\n", spmc_attrs.major_version,
		spmc_attrs.minor_version);

	VERBOSE("SPM Core run time EL%x.\n",
		SPMD_SPM_AT_SEL2 ? MODE_EL2 : MODE_EL1);

	/* Validate the SPMC ID, ensure the high bit is set */
	if (((spmc_attrs.spmc_id >> SPMC_SECURE_ID_SHIFT) &
	     SPMC_SECURE_ID_MASK) == 0U) {
		WARN("Invalid ID (0x%x) for SPMC.\n", spmc_attrs.spmc_id);
		return -EINVAL;
	}

	/* Validate the SPM Core execution state */
	if ((spmc_attrs.exec_state != MODE_RW_64) &&
	    (spmc_attrs.exec_state != MODE_RW_32)) {
		WARN("Unsupported %s%x.\n", "SPM Core execution state 0x",
		     spmc_attrs.exec_state);
		return -EINVAL;
	}

	VERBOSE("%s%x.\n", "SPM Core execution state 0x",
		spmc_attrs.exec_state);

#if SPMD_SPM_AT_SEL2
	/* Ensure manifest has not requested AArch32 state in S-EL2 */
	if (spmc_attrs.exec_state == MODE_RW_32) {
		WARN("AArch32 state at S-EL2 is not supported.\n");
		return -EINVAL;
	}

	/*
	 * Check that S-EL2 is supported on this system, given that S-EL2
	 * is required for the SPM.
	 */
	if (!is_armv8_4_sel2_present()) {
		WARN("SPM Core run time S-EL2 is not supported.\n");
		return -EINVAL;
	}
#endif /* SPMD_SPM_AT_SEL2 */

	/* Initialise an entrypoint to set up the CPU context */
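	/*
	 * Note: EP_ST_ENABLE is assumed to request that context management
	 * grant S-EL1 access to the secure physical timer (SCR_EL3.ST).
	 */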
	ep_attr = SECURE | EP_ST_ENABLE;
	if ((read_sctlr_el3() & SCTLR_EE_BIT) != 0ULL) {
		ep_attr |= EP_EE_BIG;
	}

	SET_PARAM_HEAD(spmc_ep_info, PARAM_EP, VERSION_1, ep_attr);
	assert(spmc_ep_info->pc == BL32_BASE);

	/*
	 * Populate SPSR for SPM Core based upon validated parameters from the
	 * manifest.
	 */
	if (spmc_attrs.exec_state == MODE_RW_32) {
		spmc_ep_info->spsr = SPSR_MODE32(MODE32_svc, SPSR_T_ARM,
						 SPSR_E_LITTLE,
						 DAIF_FIQ_BIT |
						 DAIF_IRQ_BIT |
						 DAIF_ABT_BIT);
	} else {

#if SPMD_SPM_AT_SEL2
		static const uint32_t runtime_el = MODE_EL2;
#else
		static const uint32_t runtime_el = MODE_EL1;
#endif
		spmc_ep_info->spsr = SPSR_64(runtime_el,
					     MODE_SP_ELX,
					     DISABLE_ALL_EXCEPTIONS);
	}

	/* Initialise SPM Core context with this entry point information */
	cm_setup_context(&spm_ctx->cpu_ctx, spmc_ep_info);

	/* Reuse PSCI affinity states to mark this SPMC context as off */
	spm_ctx->state = AFF_STATE_OFF;

	INFO("SPM Core setup done.\n");

	/* Register init function for deferred init. */
	bl31_register_bl32_init(&spmd_init);

	return 0;
}

/*******************************************************************************
 * Initialise the SPM Core context.
 ******************************************************************************/
int spmd_setup(void)
{
	void *spmc_manifest;
	int rc;

	spmc_ep_info = bl31_plat_get_next_image_ep_info(SECURE);
	if (spmc_ep_info == NULL) {
		WARN("No SPM Core image provided by BL2 boot loader.\n");
		return -EINVAL;
	}

	/* Under no circumstances will this parameter be 0 */
	assert(spmc_ep_info->pc != 0ULL);

	/*
	 * Check if BL32 ep_info has a reference to 'tos_fw_config'. This will
	 * be used as a manifest for the SPM Core at the next lower EL/mode.
	 */
	spmc_manifest = (void *)spmc_ep_info->args.arg0;
	if (spmc_manifest == NULL) {
		ERROR("Invalid or absent SPM Core manifest.\n");
		return -EINVAL;
	}

	/* Load manifest, init SPMC */
	rc = spmd_spmc_init(spmc_manifest);
	if (rc != 0) {
		WARN("Booting device without SPM initialization.\n");
	}

	return rc;
}

/*******************************************************************************
 * Forward SMC to the other security state.
 ******************************************************************************/
static uint64_t spmd_smc_forward(uint32_t smc_fid,
				 bool secure_origin,
				 uint64_t x1,
				 uint64_t x2,
				 uint64_t x3,
				 uint64_t x4,
				 void *handle)
{
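	/*
	 * The "in" state is the world this SMC arrived from; the "out" state
	 * is the opposite world that will receive the forwarded call.
	 */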
	uint32_t secure_state_in = (secure_origin) ? SECURE : NON_SECURE;
	uint32_t secure_state_out = (!secure_origin) ? SECURE : NON_SECURE;

	/* Save incoming security state */
	cm_el1_sysregs_context_save(secure_state_in);
#if SPMD_SPM_AT_SEL2
	cm_el2_sysregs_context_save(secure_state_in);
#endif

	/* Restore outgoing security state */
	cm_el1_sysregs_context_restore(secure_state_out);
#if SPMD_SPM_AT_SEL2
	cm_el2_sysregs_context_restore(secure_state_out);
#endif
	cm_set_next_eret_context(secure_state_out);

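	/*
	 * SMC_RET8 writes the return values into the outgoing context and
	 * hands that context back to the SMC framework, which is expected to
	 * ERET into the other world.
	 */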
	SMC_RET8(cm_get_context(secure_state_out), smc_fid, x1, x2, x3, x4,
		 SMC_GET_GP(handle, CTX_GPREG_X5),
		 SMC_GET_GP(handle, CTX_GPREG_X6),
		 SMC_GET_GP(handle, CTX_GPREG_X7));
}

/*******************************************************************************
 * Return FFA_ERROR with the specified error code.
 ******************************************************************************/
static uint64_t spmd_ffa_error_return(void *handle, int error_code)
{
	SMC_RET8(handle, FFA_ERROR,
		 FFA_TARGET_INFO_MBZ, error_code,
		 FFA_PARAM_MBZ, FFA_PARAM_MBZ, FFA_PARAM_MBZ,
		 FFA_PARAM_MBZ, FFA_PARAM_MBZ);
}

/*******************************************************************************
 * This function handles all SMCs in the range reserved for FFA. Each call is
 * either forwarded to the other security state or handled by the SPM
 * dispatcher.
 ******************************************************************************/
uint64_t spmd_smc_handler(uint32_t smc_fid,
			  uint64_t x1,
			  uint64_t x2,
			  uint64_t x3,
			  uint64_t x4,
			  void *cookie,
			  void *handle,
			  uint64_t flags)
{
	spmd_spm_core_context_t *ctx = spmd_get_context();
	bool secure_origin;
	int32_t ret;
	uint32_t input_version;

	/* Determine which security state this SMC originated from */
	secure_origin = is_caller_secure(flags);

	INFO("SPM: 0x%x 0x%llx 0x%llx 0x%llx 0x%llx 0x%llx 0x%llx 0x%llx\n",
	     smc_fid, x1, x2, x3, x4, SMC_GET_GP(handle, CTX_GPREG_X5),
	     SMC_GET_GP(handle, CTX_GPREG_X6),
	     SMC_GET_GP(handle, CTX_GPREG_X7));

	switch (smc_fid) {
	case FFA_ERROR:
		/*
		 * Check if this is the first invocation of this interface on
		 * this CPU. If so, then indicate that the SPM Core initialised
		 * unsuccessfully.
		 */
		if (secure_origin && (ctx->state == SPMC_STATE_RESET)) {
			spmd_spm_core_sync_exit(x2);
		}

		return spmd_smc_forward(smc_fid, secure_origin,
					x1, x2, x3, x4, handle);
		break; /* not reached */

	case FFA_VERSION:
		input_version = (uint32_t)(0xFFFFFFFF & x1);
		/*
		 * Sanity check "input_version" and ensure the SPMC has been
		 * initialized. If the caller is secure, return the SPMD's
		 * FF-A version; if the caller is non-secure, return the
		 * SPMC's version.
		 */
		if ((input_version & FFA_VERSION_BIT31_MASK) ||
		    (ctx->state == SPMC_STATE_RESET)) {
			ret = FFA_ERROR_NOT_SUPPORTED;
		} else if (!secure_origin) {
			ret = MAKE_FFA_VERSION(spmc_attrs.major_version,
					       spmc_attrs.minor_version);
		} else {
			ret = MAKE_FFA_VERSION(FFA_VERSION_MAJOR,
					       FFA_VERSION_MINOR);
		}

		SMC_RET8(handle, ret, FFA_TARGET_INFO_MBZ, FFA_TARGET_INFO_MBZ,
			 FFA_PARAM_MBZ, FFA_PARAM_MBZ, FFA_PARAM_MBZ,
			 FFA_PARAM_MBZ, FFA_PARAM_MBZ);
		break; /* not reached */

	case FFA_FEATURES:
		/*
		 * This is an optional interface. Do the minimal checks and
		 * forward to SPM Core which will handle it if implemented.
		 */

		/*
		 * Check if x1 holds a valid FFA fid. This is an
		 * optimization.
		 */
		if (!is_ffa_fid(x1)) {
			return spmd_ffa_error_return(handle,
						     FFA_ERROR_NOT_SUPPORTED);
		}

		/* Forward SMC from Normal world to the SPM Core */
		if (!secure_origin) {
			return spmd_smc_forward(smc_fid, secure_origin,
						x1, x2, x3, x4, handle);
		}

		/*
		 * Return success if the call was from the secure world, i.e.
		 * all FFA functions are supported. This is essentially a
		 * no-op.
		 */
		SMC_RET8(handle, FFA_SUCCESS_SMC32, x1, x2, x3, x4,
			 SMC_GET_GP(handle, CTX_GPREG_X5),
			 SMC_GET_GP(handle, CTX_GPREG_X6),
			 SMC_GET_GP(handle, CTX_GPREG_X7));

		break; /* not reached */

	case FFA_ID_GET:
		/*
		 * Returns the ID of the calling FFA component.
		 */
		if (!secure_origin) {
			SMC_RET8(handle, FFA_SUCCESS_SMC32,
				 FFA_TARGET_INFO_MBZ, FFA_NS_ENDPOINT_ID,
				 FFA_PARAM_MBZ, FFA_PARAM_MBZ,
				 FFA_PARAM_MBZ, FFA_PARAM_MBZ,
				 FFA_PARAM_MBZ);
		}

		SMC_RET8(handle, FFA_SUCCESS_SMC32,
			 FFA_TARGET_INFO_MBZ, spmc_attrs.spmc_id,
			 FFA_PARAM_MBZ, FFA_PARAM_MBZ,
			 FFA_PARAM_MBZ, FFA_PARAM_MBZ,
			 FFA_PARAM_MBZ);

		break; /* not reached */

	case FFA_RX_RELEASE:
	case FFA_RXTX_MAP_SMC32:
	case FFA_RXTX_MAP_SMC64:
	case FFA_RXTX_UNMAP:
	case FFA_MSG_RUN:
		/* This interface must be invoked only by the Normal world */
		if (secure_origin) {
			return spmd_ffa_error_return(handle,
						     FFA_ERROR_NOT_SUPPORTED);
		}

		/* Fall through to forward the call to the other world */

	case FFA_PARTITION_INFO_GET:
	case FFA_MSG_SEND:
	case FFA_MSG_SEND_DIRECT_REQ_SMC32:
	case FFA_MSG_SEND_DIRECT_REQ_SMC64:
	case FFA_MSG_SEND_DIRECT_RESP_SMC32:
	case FFA_MSG_SEND_DIRECT_RESP_SMC64:
	case FFA_MEM_DONATE_SMC32:
	case FFA_MEM_DONATE_SMC64:
	case FFA_MEM_LEND_SMC32:
	case FFA_MEM_LEND_SMC64:
	case FFA_MEM_SHARE_SMC32:
	case FFA_MEM_SHARE_SMC64:
	case FFA_MEM_RETRIEVE_REQ_SMC32:
	case FFA_MEM_RETRIEVE_REQ_SMC64:
	case FFA_MEM_RETRIEVE_RESP:
	case FFA_MEM_RELINQUISH:
	case FFA_MEM_RECLAIM:
	case FFA_SUCCESS_SMC32:
	case FFA_SUCCESS_SMC64:
		/*
		 * TODO: Assume that no requests originate from EL3 at the
		 * moment. This will change if an SP service is required in
		 * response to secure interrupts targeted to EL3. Until then
		 * simply forward the call to the Normal world.
		 */

		return spmd_smc_forward(smc_fid, secure_origin,
					x1, x2, x3, x4, handle);
		break; /* not reached */

	case FFA_MSG_WAIT:
		/*
		 * Check if this is the first invocation of this interface on
		 * this CPU from the Secure world. If so, then indicate that the
		 * SPM Core initialised successfully.
		 */
		if (secure_origin && (ctx->state == SPMC_STATE_RESET)) {
			spmd_spm_core_sync_exit(0);
		}

		/* Fall through to forward the call to the other world */

	case FFA_MSG_YIELD:
		/* This interface must be invoked only by the Secure world */
		if (!secure_origin) {
			return spmd_ffa_error_return(handle,
						     FFA_ERROR_NOT_SUPPORTED);
		}

		return spmd_smc_forward(smc_fid, secure_origin,
					x1, x2, x3, x4, handle);
		break; /* not reached */

	default:
		WARN("SPM: Unsupported call 0x%08x\n", smc_fid);
		return spmd_ffa_error_return(handle, FFA_ERROR_NOT_SUPPORTED);
	}
}