Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 1 | /* |
Michal Simek | 2a47faa | 2023-04-14 08:43:51 +0200 | [diff] [blame] | 2 | * Copyright (c) 2014-2020, Arm Limited and Contributors. All rights reserved. |
Prasad Kummari | e078311 | 2023-04-26 11:02:07 +0530 | [diff] [blame] | 3 | * Copyright (c) 2023, Advanced Micro Devices, Inc. All rights reserved. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 4 | * |
dp-arm | fa3cf0b | 2017-05-03 09:38:09 +0100 | [diff] [blame] | 5 | * SPDX-License-Identifier: BSD-3-Clause |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 6 | */ |
| 7 | |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 8 | #include <assert.h> |
Scott Branden | e5dcf98 | 2020-08-25 13:49:32 -0700 | [diff] [blame] | 9 | #include <inttypes.h> |
| 10 | #include <stdint.h> |
Antonio Nino Diaz | e0f9063 | 2018-12-14 00:18:21 +0000 | [diff] [blame] | 11 | |
| 12 | #include <arch_helpers.h> |
| 13 | #include <common/debug.h> |
Venkatesh Yadav Abbarapu | 1463dd5 | 2020-01-07 03:25:16 -0700 | [diff] [blame] | 14 | #include <plat_startup.h> |
Antonio Nino Diaz | e0f9063 | 2018-12-14 00:18:21 +0000 | [diff] [blame] | 15 | |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 16 | |
| 17 | /* |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 18 | * HandoffParams |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 19 | * Parameter bitfield encoding |
| 20 | * ----------------------------------------------------------------------------- |
| 21 | * Exec State 0 0 -> Aarch64, 1-> Aarch32 |
Soren Brinkmann | 8bcd305 | 2016-05-29 09:48:26 -0700 | [diff] [blame] | 22 | * endianness 1 0 -> LE, 1 -> BE |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 23 | * secure (TZ) 2 0 -> Non secure, 1 -> secure |
| 24 | * EL 3:4 00 -> EL0, 01 -> EL1, 10 -> EL2, 11 -> EL3 |
| 25 | * CPU# 5:6 00 -> A53_0, 01 -> A53_1, 10 -> A53_2, 11 -> A53_3 |
| 26 | */ |
| 27 | |
/* Bit 0: execution state of the partition (see bitfield table above). */
#define XBL_FLAGS_ESTATE_SHIFT		0U
#define XBL_FLAGS_ESTATE_MASK		(1U << XBL_FLAGS_ESTATE_SHIFT)
#define XBL_FLAGS_ESTATE_A64		0U
#define XBL_FLAGS_ESTATE_A32		1U

/* Bit 1: endianness the partition expects. */
#define XBL_FLAGS_ENDIAN_SHIFT		1U
#define XBL_FLAGS_ENDIAN_MASK		(1U << XBL_FLAGS_ENDIAN_SHIFT)
#define XBL_FLAGS_ENDIAN_LE		0U
#define XBL_FLAGS_ENDIAN_BE		1U

/* Bit 2: security state (TrustZone) of the partition. */
#define XBL_FLAGS_TZ_SHIFT		2U
#define XBL_FLAGS_TZ_MASK		(1U << XBL_FLAGS_TZ_SHIFT)
#define XBL_FLAGS_NON_SECURE		0U
#define XBL_FLAGS_SECURE		1U

/* Bits 3:4: target exception level (two-bit field, values after shift). */
#define XBL_FLAGS_EL_SHIFT		3U
#define XBL_FLAGS_EL_MASK		(3U << XBL_FLAGS_EL_SHIFT)
#define XBL_FLAGS_EL0			0U
#define XBL_FLAGS_EL1			1U
#define XBL_FLAGS_EL2			2U
#define XBL_FLAGS_EL3			3U

/* Bits 5:6: target CPU core (two-bit field, values after shift). */
#define XBL_FLAGS_CPU_SHIFT		5U
#define XBL_FLAGS_CPU_MASK		(3U << XBL_FLAGS_CPU_SHIFT)
#define XBL_FLAGS_A53_0		0U
#define XBL_FLAGS_A53_1		1U
#define XBL_FLAGS_A53_2		2U
#define XBL_FLAGS_A53_3		3U
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 56 | |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 57 | /** |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 58 | * get_xbl_cpu() - Get the target CPU for partition. |
Prasad Kummari | 7d0623a | 2023-06-09 14:32:00 +0530 | [diff] [blame] | 59 | * @partition: Pointer to partition struct. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 60 | * |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 61 | * Return: XBL_FLAGS_A53_0, XBL_FLAGS_A53_1, XBL_FLAGS_A53_2 or XBL_FLAGS_A53_3. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 62 | * |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 63 | */ |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 64 | static int32_t get_xbl_cpu(const struct xbl_partition *partition) |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 65 | { |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 66 | uint64_t flags = partition->flags & XBL_FLAGS_CPU_MASK; |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 67 | |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 68 | return flags >> XBL_FLAGS_CPU_SHIFT; |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 69 | } |
| 70 | |
| 71 | /** |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 72 | * get_xbl_el() - Get the target exception level for partition. |
Prasad Kummari | 7d0623a | 2023-06-09 14:32:00 +0530 | [diff] [blame] | 73 | * @partition: Pointer to partition struct. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 74 | * |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 75 | * Return: XBL_FLAGS_EL0, XBL_FLAGS_EL1, XBL_FLAGS_EL2 or XBL_FLAGS_EL3. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 76 | * |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 77 | */ |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 78 | static int32_t get_xbl_el(const struct xbl_partition *partition) |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 79 | { |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 80 | uint64_t flags = partition->flags & XBL_FLAGS_EL_MASK; |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 81 | |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 82 | return flags >> XBL_FLAGS_EL_SHIFT; |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 83 | } |
| 84 | |
| 85 | /** |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 86 | * get_xbl_ss() - Get the target security state for partition. |
Prasad Kummari | 7d0623a | 2023-06-09 14:32:00 +0530 | [diff] [blame] | 87 | * @partition: Pointer to partition struct. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 88 | * |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 89 | * Return: XBL_FLAGS_NON_SECURE or XBL_FLAGS_SECURE. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 90 | * |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 91 | */ |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 92 | static int32_t get_xbl_ss(const struct xbl_partition *partition) |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 93 | { |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 94 | uint64_t flags = partition->flags & XBL_FLAGS_TZ_MASK; |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 95 | |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 96 | return flags >> XBL_FLAGS_TZ_SHIFT; |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 97 | } |
| 98 | |
| 99 | /** |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 100 | * get_xbl_endian() - Get the target endianness for partition. |
Prasad Kummari | 7d0623a | 2023-06-09 14:32:00 +0530 | [diff] [blame] | 101 | * @partition: Pointer to partition struct. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 102 | * |
Prasad Kummari | 7d0623a | 2023-06-09 14:32:00 +0530 | [diff] [blame] | 103 | * Return: SPSR_E_LITTLE or SPSR_E_BIG. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 104 | * |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 105 | */ |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 106 | static int32_t get_xbl_endian(const struct xbl_partition *partition) |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 107 | { |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 108 | uint64_t flags = partition->flags & XBL_FLAGS_ENDIAN_MASK; |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 109 | |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 110 | flags >>= XBL_FLAGS_ENDIAN_SHIFT; |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 111 | |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 112 | if (flags == XBL_FLAGS_ENDIAN_BE) { |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 113 | return SPSR_E_BIG; |
Venkatesh Yadav Abbarapu | 987fad3 | 2022-04-29 13:52:00 +0530 | [diff] [blame] | 114 | } else { |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 115 | return SPSR_E_LITTLE; |
Venkatesh Yadav Abbarapu | 987fad3 | 2022-04-29 13:52:00 +0530 | [diff] [blame] | 116 | } |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 117 | } |
| 118 | |
| 119 | /** |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 120 | * get_xbl_estate() - Get the target execution state for partition. |
Prasad Kummari | 7d0623a | 2023-06-09 14:32:00 +0530 | [diff] [blame] | 121 | * @partition: Pointer to partition struct. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 122 | * |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 123 | * Return: XBL_FLAGS_ESTATE_A32 or XBL_FLAGS_ESTATE_A64. |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 124 | * |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 125 | */ |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 126 | static int32_t get_xbl_estate(const struct xbl_partition *partition) |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 127 | { |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 128 | uint64_t flags = partition->flags & XBL_FLAGS_ESTATE_MASK; |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 129 | |
Prasad Kummari | 07795fa | 2023-06-08 21:36:38 +0530 | [diff] [blame^] | 130 | return flags >> XBL_FLAGS_ESTATE_SHIFT; |
Michal Simek | ef8f559 | 2015-06-15 14:22:50 +0200 | [diff] [blame] | 131 | } |
| 132 | |
/**
 * xbl_handover() - Populates the bl32 and bl33 image info structures.
 * @bl32: BL32 image info structure.
 * @bl33: BL33 image info structure.
 * @handoff_addr: TF-A handoff address.
 *
 * Process the handoff parameters from the XBL and populate the BL32 and BL33
 * image info structures accordingly.
 *
 * Return: Return the status of the handoff. The value will be from the
 *         xbl_handoff enum.
 *
 */
enum xbl_handoff xbl_handover(entry_point_info_t *bl32,
					entry_point_info_t *bl33,
					uint64_t handoff_addr)
{
	const struct xbl_handoff_params *HandoffParams;

	/* A zero address means the boot loader passed no handoff structure. */
	if (!handoff_addr) {
		WARN("BL31: No handoff structure passed\n");
		return XBL_HANDOFF_NO_STRUCT;
	}

	/* The structure is identified by the ASCII magic bytes "XLNX". */
	HandoffParams = (struct xbl_handoff_params *)handoff_addr;
	if ((HandoffParams->magic[0] != 'X') ||
	    (HandoffParams->magic[1] != 'L') ||
	    (HandoffParams->magic[2] != 'N') ||
	    (HandoffParams->magic[3] != 'X')) {
		ERROR("BL31: invalid handoff structure at %" PRIx64 "\n", handoff_addr);
		return XBL_HANDOFF_INVAL_STRUCT;
	}

	VERBOSE("BL31: TF-A handoff params at:0x%" PRIx64 ", entries:%u\n",
		handoff_addr, HandoffParams->num_entries);
	/* Reject a structure claiming more partitions than we can describe. */
	if (HandoffParams->num_entries > XBL_MAX_PARTITIONS) {
		ERROR("BL31: TF-A handoff params: too many partitions (%u/%u)\n",
		      HandoffParams->num_entries, XBL_MAX_PARTITIONS);
		return XBL_HANDOFF_TOO_MANY_PARTS;
	}

	/*
	 * we loop over all passed entries but only populate two image structs
	 * (bl32, bl33). I.e. the last applicable images in the handoff
	 * structure will be used for the hand off
	 */
	for (size_t i = 0; i < HandoffParams->num_entries; i++) {
		entry_point_info_t *image;
		int32_t target_estate, target_secure, target_cpu;
		uint32_t target_endianness, target_el;

		VERBOSE("BL31: %zd: entry:0x%" PRIx64 ", flags:0x%" PRIx64 "\n", i,
			HandoffParams->partition[i].entry_point,
			HandoffParams->partition[i].flags);

		/* Only entries targeting the first core (A53_0) are honoured. */
		target_cpu = get_xbl_cpu(&HandoffParams->partition[i]);
		if (target_cpu != XBL_FLAGS_A53_0) {
			WARN("BL31: invalid target CPU (%i)\n", target_cpu);
			continue;
		}

		/* BL31 only hands off to EL1 or EL2; EL0 and EL3 are rejected. */
		target_el = get_xbl_el(&HandoffParams->partition[i]);
		if ((target_el == XBL_FLAGS_EL3) ||
		    (target_el == XBL_FLAGS_EL0)) {
			WARN("BL31: invalid exception level (%i)\n", target_el);
			continue;
		}

		/* A secure partition at EL2 is an invalid combination. */
		target_secure = get_xbl_ss(&HandoffParams->partition[i]);
		if (target_secure == XBL_FLAGS_SECURE &&
		    target_el == XBL_FLAGS_EL2) {
			WARN("BL31: invalid security state (%i) for exception level (%i)\n",
			     target_secure, target_el);
			continue;
		}

		target_estate = get_xbl_estate(&HandoffParams->partition[i]);
		target_endianness = get_xbl_endian(&HandoffParams->partition[i]);

		if (target_secure == XBL_FLAGS_SECURE) {
			/* Secure entries describe BL32; EL1 is implied here. */
			image = bl32;

			if (target_estate == XBL_FLAGS_ESTATE_A32) {
				bl32->spsr = SPSR_MODE32(MODE32_svc, SPSR_T_ARM,
							 target_endianness,
							 DISABLE_ALL_EXCEPTIONS);
			} else {
				bl32->spsr = SPSR_64(MODE_EL1, MODE_SP_ELX,
						     DISABLE_ALL_EXCEPTIONS);
			}
		} else {
			/* Non-secure entries describe BL33 (EL1 or EL2). */
			image = bl33;

			if (target_estate == XBL_FLAGS_ESTATE_A32) {
				/* Map the flag EL to the AArch32 processor mode. */
				if (target_el == XBL_FLAGS_EL2) {
					target_el = MODE32_hyp;
				} else {
					target_el = MODE32_sys;
				}

				bl33->spsr = SPSR_MODE32(target_el, SPSR_T_ARM,
							 target_endianness,
							 DISABLE_ALL_EXCEPTIONS);
			} else {
				/* Map the flag EL to the AArch64 mode encoding. */
				if (target_el == XBL_FLAGS_EL2) {
					target_el = MODE_EL2;
				} else {
					target_el = MODE_EL1;
				}

				bl33->spsr = SPSR_64(target_el, MODE_SP_ELX,
						     DISABLE_ALL_EXCEPTIONS);
			}
		}

		VERBOSE("Setting up %s entry point to:%" PRIx64 ", el:%x\n",
			target_secure == XBL_FLAGS_SECURE ? "BL32" : "BL33",
			HandoffParams->partition[i].entry_point,
			target_el);
		image->pc = HandoffParams->partition[i].entry_point;

		if (target_endianness == SPSR_E_BIG) {
			EP_SET_EE(image->h.attr, EP_EE_BIG);
		} else {
			EP_SET_EE(image->h.attr, EP_EE_LITTLE);
		}
	}

	return XBL_HANDOFF_SUCCESS;
}