/*
 * Copyright (c) 2014-2020, Arm Limited and Contributors. All rights reserved.
 * Copyright (c) 2023, Advanced Micro Devices, Inc. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <assert.h>
#include <inttypes.h>
#include <stdint.h>

#include <arch_helpers.h>
#include <common/debug.h>
#include <plat_startup.h>

/*
 * HandoffParams
 * Parameter bitfield encoding
 * -----------------------------------------------------------------------------
 * Exec State	0	0 -> AArch64, 1 -> AArch32
 * Endianness	1	0 -> LE, 1 -> BE
 * Secure (TZ)	2	0 -> Non-secure, 1 -> Secure
 * EL		3:4	00 -> EL0, 01 -> EL1, 10 -> EL2, 11 -> EL3
 * CPU#		5:6	00 -> A53_0, 01 -> A53_1, 10 -> A53_2, 11 -> A53_3
 * Reserved	7:10	Reserved
 * Cluster#	11:12	00 -> Cluster 0, 01 -> Cluster 1, 10 -> Cluster 2,
 *			11 -> Cluster 3 (applicable for Versal NET only)
 * Reserved	13:16	Reserved
 */
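
/*
 * Worked example (illustrative, not taken from boot firmware output):
 * flags = 0xC sets bit 2 (TZ) = 1 and bits 3:4 (EL) = 01, with all other
 * fields 0. This encodes a secure, little-endian, AArch64 partition
 * entered at EL1 on core A53_0 in cluster 0.
 */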

#define XBL_FLAGS_ESTATE_SHIFT		0U
#define XBL_FLAGS_ESTATE_MASK		(1U << XBL_FLAGS_ESTATE_SHIFT)
#define XBL_FLAGS_ESTATE_A64		0U
#define XBL_FLAGS_ESTATE_A32		1U

#define XBL_FLAGS_ENDIAN_SHIFT		1U
#define XBL_FLAGS_ENDIAN_MASK		(1U << XBL_FLAGS_ENDIAN_SHIFT)
#define XBL_FLAGS_ENDIAN_LE		0U
#define XBL_FLAGS_ENDIAN_BE		1U

#define XBL_FLAGS_TZ_SHIFT		2U
#define XBL_FLAGS_TZ_MASK		(1U << XBL_FLAGS_TZ_SHIFT)
#define XBL_FLAGS_NON_SECURE		0U
#define XBL_FLAGS_SECURE		1U

#define XBL_FLAGS_EL_SHIFT		3U
#define XBL_FLAGS_EL_MASK		(3U << XBL_FLAGS_EL_SHIFT)
#define XBL_FLAGS_EL0			0U
#define XBL_FLAGS_EL1			1U
#define XBL_FLAGS_EL2			2U
#define XBL_FLAGS_EL3			3U

#define XBL_FLAGS_CPU_SHIFT		5U
#define XBL_FLAGS_CPU_MASK		(3U << XBL_FLAGS_CPU_SHIFT)
#define XBL_FLAGS_A53_0			0U
#define XBL_FLAGS_A53_1			1U
#define XBL_FLAGS_A53_2			2U
#define XBL_FLAGS_A53_3			3U

#if defined(PLAT_versal_net)
#define XBL_FLAGS_CLUSTER_SHIFT		11U
#define XBL_FLAGS_CLUSTER_MASK		GENMASK(12, 11)
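/*
 * Note: this assumes TF-A's GENMASK(high, low) convention from
 * utils_def.h, so GENMASK(12, 11) selects the cluster bits 11:12
 * described in the bitfield encoding above.
 */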

#define XBL_FLAGS_CLUSTER_0		0U
#endif /* PLAT_versal_net */

/**
 * get_xbl_cpu() - Get the target CPU for the partition.
 * @partition: Pointer to partition struct.
 *
 * Return: XBL_FLAGS_A53_0, XBL_FLAGS_A53_1, XBL_FLAGS_A53_2 or XBL_FLAGS_A53_3.
 */
static int32_t get_xbl_cpu(const struct xbl_partition *partition)
{
	uint64_t flags = partition->flags & XBL_FLAGS_CPU_MASK;

	return (int32_t)(flags >> XBL_FLAGS_CPU_SHIFT);
}

/**
 * get_xbl_el() - Get the target exception level for the partition.
 * @partition: Pointer to partition struct.
 *
 * Return: XBL_FLAGS_EL0, XBL_FLAGS_EL1, XBL_FLAGS_EL2 or XBL_FLAGS_EL3.
 */
static int32_t get_xbl_el(const struct xbl_partition *partition)
{
	uint64_t flags = partition->flags & XBL_FLAGS_EL_MASK;

	return (int32_t)(flags >> XBL_FLAGS_EL_SHIFT);
}

/**
 * get_xbl_ss() - Get the target security state for the partition.
 * @partition: Pointer to partition struct.
 *
 * Return: XBL_FLAGS_NON_SECURE or XBL_FLAGS_SECURE.
 */
static int32_t get_xbl_ss(const struct xbl_partition *partition)
{
	uint64_t flags = partition->flags & XBL_FLAGS_TZ_MASK;

	return (int32_t)(flags >> XBL_FLAGS_TZ_SHIFT);
}

/**
 * get_xbl_endian() - Get the target endianness for the partition.
 * @partition: Pointer to partition struct.
 *
 * Return: SPSR_E_LITTLE or SPSR_E_BIG.
 */
static int32_t get_xbl_endian(const struct xbl_partition *partition)
{
	uint64_t flags = partition->flags & XBL_FLAGS_ENDIAN_MASK;

	flags >>= XBL_FLAGS_ENDIAN_SHIFT;

	if (flags == XBL_FLAGS_ENDIAN_BE) {
		return SPSR_E_BIG;
	} else {
		return SPSR_E_LITTLE;
	}
}

/**
 * get_xbl_estate() - Get the target execution state for the partition.
 * @partition: Pointer to partition struct.
 *
 * Return: XBL_FLAGS_ESTATE_A32 or XBL_FLAGS_ESTATE_A64.
 */
static int32_t get_xbl_estate(const struct xbl_partition *partition)
{
	uint64_t flags = partition->flags & XBL_FLAGS_ESTATE_MASK;

	return (int32_t)(flags >> XBL_FLAGS_ESTATE_SHIFT);
}

#if defined(PLAT_versal_net)
/**
 * get_xbl_cluster() - Get the cluster number for the partition.
 * @partition: Pointer to partition struct.
 *
 * Return: Cluster number for the partition.
 */
static int32_t get_xbl_cluster(const struct xbl_partition *partition)
{
	uint64_t flags = partition->flags & XBL_FLAGS_CLUSTER_MASK;

	return (int32_t)(flags >> XBL_FLAGS_CLUSTER_SHIFT);
}
#endif /* PLAT_versal_net */

/**
 * xbl_handover() - Populates the bl32 and bl33 image info structures.
 * @bl32: BL32 image info structure.
 * @bl33: BL33 image info structure.
 * @handoff_addr: TF-A handoff address.
 *
 * Process the handoff parameters from the XBL and populate the BL32 and BL33
 * image info structures accordingly.
 *
 * Return: Status of the handoff; the value will be from the xbl_handoff enum.
 */
enum xbl_handoff xbl_handover(entry_point_info_t *bl32,
			      entry_point_info_t *bl33,
			      uint64_t handoff_addr)
{
	const struct xbl_handoff_params *HandoffParams;

	if (!handoff_addr) {
		WARN("BL31: No handoff structure passed\n");
		return XBL_HANDOFF_NO_STRUCT;
	}

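	/*
	 * The handoff structure is expected to start with the ASCII magic
	 * "XLNX", placed there by the Xilinx boot firmware (e.g. FSBL/PLM).
	 */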
	HandoffParams = (struct xbl_handoff_params *)handoff_addr;
	if ((HandoffParams->magic[0] != 'X') ||
	    (HandoffParams->magic[1] != 'L') ||
	    (HandoffParams->magic[2] != 'N') ||
	    (HandoffParams->magic[3] != 'X')) {
		ERROR("BL31: invalid handoff structure at %" PRIx64 "\n", handoff_addr);
		return XBL_HANDOFF_INVAL_STRUCT;
	}

	VERBOSE("BL31: TF-A handoff params at:0x%" PRIx64 ", entries:%u\n",
		handoff_addr, HandoffParams->num_entries);
	if (HandoffParams->num_entries > XBL_MAX_PARTITIONS) {
		ERROR("BL31: TF-A handoff params: too many partitions (%u/%u)\n",
		      HandoffParams->num_entries, XBL_MAX_PARTITIONS);
		return XBL_HANDOFF_TOO_MANY_PARTS;
	}

	/*
	 * We loop over all passed entries but only populate two image
	 * structs (bl32, bl33), i.e. the last applicable images in the
	 * handoff structure will be used for the handover.
	 */
	for (size_t i = 0; i < HandoffParams->num_entries; i++) {
		entry_point_info_t *image;
		int32_t target_estate, target_secure, target_cpu;
		uint32_t target_endianness, target_el;

		VERBOSE("BL31: %zu: entry:0x%" PRIx64 ", flags:0x%" PRIx64 "\n", i,
			HandoffParams->partition[i].entry_point,
			HandoffParams->partition[i].flags);
#if defined(PLAT_versal_net)
		int32_t target_cluster;

		target_cluster = get_xbl_cluster(&HandoffParams->partition[i]);
		if (target_cluster != XBL_FLAGS_CLUSTER_0) {
			WARN("BL31: invalid target Cluster (%i)\n",
			     target_cluster);
			continue;
		}
#endif /* PLAT_versal_net */

		target_cpu = get_xbl_cpu(&HandoffParams->partition[i]);
		if (target_cpu != XBL_FLAGS_A53_0) {
			WARN("BL31: invalid target CPU (%i)\n", target_cpu);
			continue;
		}

		target_el = get_xbl_el(&HandoffParams->partition[i]);
		if ((target_el == XBL_FLAGS_EL3) ||
		    (target_el == XBL_FLAGS_EL0)) {
			WARN("BL31: invalid target exception level (%i)\n",
			     target_el);
			continue;
		}

		target_secure = get_xbl_ss(&HandoffParams->partition[i]);
		if ((target_secure == XBL_FLAGS_SECURE) &&
		    (target_el == XBL_FLAGS_EL2)) {
			WARN("BL31: invalid security state (%i) for exception level (%i)\n",
			     target_secure, target_el);
			continue;
		}

		target_estate = get_xbl_estate(&HandoffParams->partition[i]);
		target_endianness = get_xbl_endian(&HandoffParams->partition[i]);

		if (target_secure == XBL_FLAGS_SECURE) {
			image = bl32;

			if (target_estate == XBL_FLAGS_ESTATE_A32) {
				bl32->spsr = SPSR_MODE32(MODE32_svc, SPSR_T_ARM,
							 target_endianness,
							 DISABLE_ALL_EXCEPTIONS);
			} else {
				bl32->spsr = SPSR_64(MODE_EL1, MODE_SP_ELX,
						     DISABLE_ALL_EXCEPTIONS);
			}
		} else {
			image = bl33;

			if (target_estate == XBL_FLAGS_ESTATE_A32) {
				if (target_el == XBL_FLAGS_EL2) {
					target_el = MODE32_hyp;
				} else {
					target_el = MODE32_sys;
				}

				bl33->spsr = SPSR_MODE32(target_el, SPSR_T_ARM,
							 target_endianness,
							 DISABLE_ALL_EXCEPTIONS);
			} else {
				if (target_el == XBL_FLAGS_EL2) {
					target_el = MODE_EL2;
				} else {
					target_el = MODE_EL1;
				}

				bl33->spsr = SPSR_64(target_el, MODE_SP_ELX,
						     DISABLE_ALL_EXCEPTIONS);
			}
		}

		VERBOSE("Setting up %s entry point to:%" PRIx64 ", el:%x\n",
			target_secure == XBL_FLAGS_SECURE ? "BL32" : "BL33",
			HandoffParams->partition[i].entry_point,
			target_el);
		image->pc = HandoffParams->partition[i].entry_point;

		if (target_endianness == SPSR_E_BIG) {
			EP_SET_EE(image->h.attr, EP_EE_BIG);
		} else {
			EP_SET_EE(image->h.attr, EP_EE_LITTLE);
		}
	}

	return XBL_HANDOFF_SUCCESS;
}
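
/*
 * Illustrative usage (hypothetical caller, not part of this file): a
 * platform's BL31 early setup might consume the handoff roughly as
 * follows, assuming bl32_image_ep_info/bl33_image_ep_info entry point
 * structs and a platform-defined handoff address:
 *
 *	enum xbl_handoff ret;
 *
 *	ret = xbl_handover(&bl32_image_ep_info, &bl33_image_ep_info,
 *			   plat_handoff_addr);
 *	if (ret != XBL_HANDOFF_SUCCESS) {
 *		(fall back to build-time entry point defaults)
 *	}
 */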