/*
 * Copyright (c) 2015-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <assert.h>
#include <string.h>

#include <platform_def.h>

#include <arch_features.h>
#include <arch_helpers.h>
#include <common/bl_common.h>
#include <common/debug.h>
#include <common/desc_image_load.h>
#include <drivers/generic_delay_timer.h>
#include <drivers/partition/partition.h>
#include <lib/fconf/fconf.h>
#include <lib/fconf/fconf_dyn_cfg_getter.h>
#include <lib/gpt_rme/gpt_rme.h>
#if TRANSFER_LIST
#include <lib/transfer_list.h>
#endif
#ifdef SPD_opteed
#include <lib/optee_utils.h>
#endif
#include <lib/utils.h>
#include <plat/arm/common/plat_arm.h>
#include <plat/common/platform.h>

/* Data structure which holds the extents of the trusted SRAM for BL2 */
static meminfo_t bl2_tzram_layout __aligned(CACHE_WRITEBACK_GRANULE);

/* Base address of fw_config received from BL1 */
static uintptr_t config_base __unused;

/*
 * Check that BL2_BASE is above the region reserved below it: the EL3
 * firmware handoff area when TRANSFER_LIST is enabled, otherwise the page
 * reserved for the `meminfo_t` data structure and fw_configs passed from
 * BL1 (ARM_FW_CONFIG_LIMIT).
 */
#if TRANSFER_LIST
CASSERT(BL2_BASE >= PLAT_ARM_EL3_FW_HANDOFF_BASE + PLAT_ARM_FW_HANDOFF_SIZE,
        assert_bl2_base_overflows);
#else
CASSERT(BL2_BASE >= ARM_FW_CONFIG_LIMIT, assert_bl2_base_overflows);
#endif /* TRANSFER_LIST */

/* Weak definitions may be overridden by specific ARM standard platforms */
#pragma weak bl2_early_platform_setup2
#pragma weak bl2_platform_setup
#pragma weak bl2_plat_arch_setup
#pragma weak bl2_plat_sec_mem_layout

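/*
 * Map the whole of BL2's trusted SRAM as read-write memory. With RME
 * enabled, BL2 executes from the Root world, so the region is attributed
 * MT_ROOT; otherwise it is attributed MT_SECURE.
 */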
#if ENABLE_RME
#define MAP_BL2_TOTAL   MAP_REGION_FLAT(                        \
                                bl2_tzram_layout.total_base,    \
                                bl2_tzram_layout.total_size,    \
                                MT_MEMORY | MT_RW | MT_ROOT)
#else
#define MAP_BL2_TOTAL   MAP_REGION_FLAT(                        \
                                bl2_tzram_layout.total_base,    \
                                bl2_tzram_layout.total_size,    \
                                MT_MEMORY | MT_RW | MT_SECURE)
#endif /* ENABLE_RME */

#pragma weak arm_bl2_plat_handle_post_image_load

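/*
 * Firmware handoff transfer lists used when TRANSFER_LIST is enabled:
 * secure_tl is the secure-world list received from BL1 (or created locally
 * when RESET_TO_BL2 is set); ns_tl refers to the non-secure list.
 */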
static struct transfer_list_header *secure_tl __unused;
static struct transfer_list_header *ns_tl __unused;

/*******************************************************************************
 * BL1 has passed the extents of the trusted SRAM that should be visible to BL2
 * in x0. This memory layout is sitting at the base of the free trusted SRAM.
 * Copy it to a safe location before it is reclaimed by later BL2 functionality.
 ******************************************************************************/
void arm_bl2_early_platform_setup(uintptr_t fw_config,
                                  struct meminfo *mem_layout)
{
        struct transfer_list_entry *te __unused;
        int __maybe_unused ret;

        /* Initialize the console to provide early debug support */
        arm_console_boot_init();

#if TRANSFER_LIST
        /*
         * TODO: Modify the prototype of this function, since with
         * TRANSFER_LIST the fw_config argument actually carries the secure
         * transfer list rather than a fw_config address.
         */
        secure_tl = (struct transfer_list_header *)fw_config;

        te = transfer_list_find(secure_tl, TL_TAG_SRAM_LAYOUT64);
        assert(te != NULL);

        bl2_tzram_layout = *(meminfo_t *)transfer_list_entry_data(te);
        transfer_list_rem(secure_tl, te);
#else
        config_base = fw_config;

        /* Setup the BL2 memory layout */
        bl2_tzram_layout = *mem_layout;
#endif

        /* Initialise the IO layer and register platform IO devices */
        plat_arm_io_setup();

        /* Load partition table */
#if ARM_GPT_SUPPORT
        ret = gpt_partition_init();
        if (ret != 0) {
                ERROR("GPT partition initialisation failed!\n");
                panic();
        }

#endif /* ARM_GPT_SUPPORT */
}

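/*
 * Generic BL2 early setup entry point: arg0 carries the fw_config address
 * (or the secure transfer list when TRANSFER_LIST is enabled) and arg1 the
 * meminfo layout from BL1. Forward both to the ARM-specific handler, then
 * start the generic delay timer.
 */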
void bl2_early_platform_setup2(u_register_t arg0, u_register_t arg1, u_register_t arg2, u_register_t arg3)
{
        arm_bl2_early_platform_setup((uintptr_t)arg0, (meminfo_t *)arg1);

        generic_delay_timer_init();
}

/*
 * Perform BL2 preload setup. Currently we initialise the dynamic
 * configuration here.
 */
void bl2_plat_preload_setup(void)
{
#if TRANSFER_LIST
/* Assume the secure TL hasn't been initialised if BL2 is running at EL3. */
#if RESET_TO_BL2
        secure_tl = transfer_list_init((void *)PLAT_ARM_EL3_FW_HANDOFF_BASE,
                                       PLAT_ARM_FW_HANDOFF_SIZE);

        if (secure_tl == NULL) {
                ERROR("Secure transfer list initialisation failed!\n");
                panic();
        }
#endif

        arm_transfer_list_dyn_cfg_init(secure_tl);
#else
        arm_bl2_dyn_cfg_init();
#endif

#if ARM_GPT_SUPPORT && !PSA_FWU_SUPPORT
        /* Always use the FIP from bank 0 */
        arm_set_fip_addr(0U);
#endif /* ARM_GPT_SUPPORT && !PSA_FWU_SUPPORT */
}

/*
 * Perform ARM standard platform setup.
 */
void arm_bl2_platform_setup(void)
{
#if !ENABLE_RME
        /* Initialize the secure environment */
        plat_arm_security_setup();
#endif

#if defined(PLAT_ARM_MEM_PROT_ADDR)
        arm_nor_psci_do_static_mem_protect();
#endif
}

void bl2_platform_setup(void)
{
        arm_bl2_platform_setup();
}

/*******************************************************************************
 * Perform the very early platform-specific architectural setup here.
 * When RME is enabled, the secure environment is initialised before
 * initialising and enabling Granule Protection.
 * This function initialises the MMU in a quick and dirty way.
 ******************************************************************************/
void arm_bl2_plat_arch_setup(void)
{
#if USE_COHERENT_MEM
        /* Ensure ARM platforms don't use coherent memory in BL2. */
        assert((BL_COHERENT_RAM_END - BL_COHERENT_RAM_BASE) == 0U);
#endif

        const mmap_region_t bl_regions[] = {
                MAP_BL2_TOTAL,
                ARM_MAP_BL_RO,
#if USE_ROMLIB
                ARM_MAP_ROMLIB_CODE,
                ARM_MAP_ROMLIB_DATA,
#endif
#if !TRANSFER_LIST
                ARM_MAP_BL_CONFIG_REGION,
#endif /* TRANSFER_LIST */
#if ENABLE_RME
                ARM_MAP_L0_GPT_REGION,
#endif
                {0}
        };

#if ENABLE_RME
        /* Initialise the secure environment */
        plat_arm_security_setup();
#endif
        setup_page_tables(bl_regions, plat_arm_get_mmap());

#ifdef __aarch64__
#if ENABLE_RME
        /* BL2 runs in EL3 when RME is enabled. */
        assert(is_feat_rme_present());
        enable_mmu_el3(0);

        /* Initialise and enable granule protection after the MMU. */
        arm_gpt_setup();
#else
        enable_mmu_el1(0);
#endif
#else
        enable_mmu_svc_mon(0);
#endif

        arm_setup_romlib();
}

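/*
 * Architectural setup for BL2, followed by populating fconf with the TB_FW
 * configuration: taken from the secure transfer list when TRANSFER_LIST is
 * enabled, otherwise from the config DTBs passed by BL1.
 */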
void bl2_plat_arch_setup(void)
{
        const struct dyn_cfg_dtb_info_t *tb_fw_config_info __unused;
        struct transfer_list_entry *te __unused;

        arm_bl2_plat_arch_setup();

#if TRANSFER_LIST
        te = transfer_list_find(secure_tl, TL_TAG_TB_FW_CONFIG);
        assert(te != NULL);

        fconf_populate("TB_FW", (uintptr_t)transfer_list_entry_data(te));
        transfer_list_rem(secure_tl, te);
#else
        /* Fill the properties struct with the info from the config dtb */
        fconf_populate("FW_CONFIG", config_base);

        /* TB_FW_CONFIG was also loaded by BL1 */
        tb_fw_config_info = FCONF_GET_PROPERTY(dyn_cfg, dtb, TB_FW_CONFIG_ID);
        assert(tb_fw_config_info != NULL);

        fconf_populate("TB_FW", tb_fw_config_info->config_addr);
#endif
}

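/*
 * Default post-image-load handling for ARM platforms: set up the entry point
 * information for BL32 (including OP-TEE header parsing when SPD_opteed is
 * enabled), BL33 and SCP_BL2 after their images have been loaded.
 */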
int arm_bl2_handle_post_image_load(unsigned int image_id)
{
        int err = 0;
        bl_mem_params_node_t *bl_mem_params = get_bl_mem_params_node(image_id);
#ifdef SPD_opteed
        bl_mem_params_node_t *pager_mem_params = NULL;
        bl_mem_params_node_t *paged_mem_params = NULL;
#endif
        assert(bl_mem_params != NULL);

        switch (image_id) {
#ifdef __aarch64__
        case BL32_IMAGE_ID:
#ifdef SPD_opteed
                pager_mem_params = get_bl_mem_params_node(BL32_EXTRA1_IMAGE_ID);
                assert(pager_mem_params);

                paged_mem_params = get_bl_mem_params_node(BL32_EXTRA2_IMAGE_ID);
                assert(paged_mem_params);

                err = parse_optee_header(&bl_mem_params->ep_info,
                                         &pager_mem_params->image_info,
                                         &paged_mem_params->image_info);
                if (err != 0) {
                        WARN("OPTEE header parse error.\n");
                }
#endif
                bl_mem_params->ep_info.spsr = arm_get_spsr_for_bl32_entry();
                break;
#endif

        case BL33_IMAGE_ID:
                /* BL33 expects to receive the primary CPU MPID (through r0) */
                bl_mem_params->ep_info.args.arg0 = 0xffff & read_mpidr();
                bl_mem_params->ep_info.spsr = arm_get_spsr_for_bl33_entry();
                break;

#ifdef SCP_BL2_BASE
        case SCP_BL2_IMAGE_ID:
                /* The subsequent handling of SCP_BL2 is platform specific */
                err = plat_arm_bl2_handle_scp_bl2(&bl_mem_params->image_info);
                if (err) {
                        WARN("Failure in platform-specific handling of SCP_BL2 image.\n");
                }
                break;
#endif
        default:
                /* Do nothing in default case */
                break;
        }

        return err;
}


/*******************************************************************************
 * This function can be used by the platforms to update/use image
 * information for the given `image_id`.
 ******************************************************************************/
int arm_bl2_plat_handle_post_image_load(unsigned int image_id)
{
#if defined(SPD_spmd) && BL2_ENABLE_SP_LOAD
        /* For Secure Partitions we don't need post processing */
        if ((image_id >= (MAX_NUMBER_IDS - MAX_SP_IDS)) &&
            (image_id < MAX_NUMBER_IDS)) {
                return 0;
        }
#endif

#if TRANSFER_LIST
        if (image_id == HW_CONFIG_ID) {
                /* Refresh the now-stale checksum after loading HW_CONFIG into the TL. */
                transfer_list_update_checksum(secure_tl);
        }
#endif /* TRANSFER_LIST */

        return arm_bl2_handle_post_image_load(image_id);
}

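/*
 * Prepare the handoff to the next image: install the firmware handoff
 * arguments (referencing the secure transfer list) into its entry point
 * info, then populate the remaining entry point information via the ARM
 * transfer list helper.
 */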
void arm_bl2_setup_next_ep_info(bl_mem_params_node_t *next_param_node)
{
        entry_point_info_t *ep __unused;

        ep = transfer_list_set_handoff_args(secure_tl,
                                            &next_param_node->ep_info);
        assert(ep != NULL);

        arm_transfer_list_populate_ep_info(next_param_node, secure_tl);
}