/*
 * Copyright (c) 2016-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

/*
 * Linker script for the AArch32 SP_MIN (BL32) image. Lays the image out in a
 * single RAM region bounded by BL32_BASE/BL32_LIMIT and exports the
 * __*_START__/__*_END__ symbols the runtime uses to set up translation-table
 * attributes (code, read-only data, read-write data, coherent/device memory).
 * This file is run through the C preprocessor before being fed to the linker.
 */

#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

OUTPUT_FORMAT(elf32-littlearm)
OUTPUT_ARCH(arm)
ENTRY(sp_min_vector_table)

MEMORY {
    RAM (rwx): ORIGIN = BL32_BASE, LENGTH = BL32_LIMIT - BL32_BASE
}

/* Platforms may append their own sections via an extra linker script. */
#ifdef PLAT_SP_MIN_EXTRA_LD_SCRIPT
#   include <plat_sp_min.ld.S>
#endif /* PLAT_SP_MIN_EXTRA_LD_SCRIPT */

SECTIONS {
    RAM_REGION_START = ORIGIN(RAM);
    RAM_REGION_LENGTH = LENGTH(RAM);
    . = BL32_BASE;

    /* Page alignment is required so code/data pages can get distinct MMU attributes. */
    ASSERT(. == ALIGN(PAGE_SIZE),
           "BL32_BASE address is not aligned on a page boundary.")

#if SEPARATE_CODE_AND_RODATA
    .text . : {
        ASSERT(. == ALIGN(PAGE_SIZE),
               ".text address is not aligned on a page boundary.");

        __TEXT_START__ = .;

        /* The entry point object must come first in the image. */
        *entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(.vectors)
        __TEXT_END_UNALIGNED__ = .;

        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM

    /* .ARM.extab and .ARM.exidx are only added because Clang needs them */
    .ARM.extab . : {
        *(.ARM.extab* .gnu.linkonce.armextab.*)
    } >RAM

    .ARM.exidx . : {
        *(.ARM.exidx* .gnu.linkonce.armexidx.*)
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;

        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        /* The pubsub events table entries require 8-byte alignment. */
        . = ALIGN(8);

#   include <lib/el3_runtime/pubsub_events.h>
        __RODATA_END_UNALIGNED__ = .;

        . = ALIGN(PAGE_SIZE);

        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    /* Combined code + read-only data section when they share MMU attributes. */
    .ro . : {
        ASSERT(. == ALIGN(PAGE_SIZE),
               ".ro address is not aligned on a page boundary.");

        __RO_START__ = .;

        /* The entry point object must come first in the image. */
        *entrypoint.o(.text*)
        *(SORT_BY_ALIGNMENT(.text*))
        *(SORT_BY_ALIGNMENT(.rodata*))

        RODATA_COMMON

        /* The pubsub events table entries require 8-byte alignment. */
        . = ALIGN(8);

#   include <lib/el3_runtime/pubsub_events.h>

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure that the rest
         * of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */

    /* The cpu_ops table (populated per-CPU elsewhere) must not be empty. */
    ASSERT(__CPU_OPS_END__ > __CPU_OPS_START__,
           "cpu_ops not defined for this platform.")

    __RW_START__ = .;

    DATA_SECTION >RAM
    RELA_SECTION >RAM

#ifdef BL32_PROGBITS_LIMIT
    ASSERT(. <= BL32_PROGBITS_LIMIT, "BL32 progbits has exceeded its limit.")
#endif /* BL32_PROGBITS_LIMIT */

    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM

    __BSS_SIZE__ = SIZEOF(.bss);

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data. This is required to set up the correct
     * memory attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;

        /*
         * Bakery locks are stored in coherent memory. Each lock's data is
         * contiguous and fully allocated by the compiler.
         */
        *(.bakery_lock)
        *(.tzfw_coherent_mem)

        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure that the rest
         * of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM

    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif /* USE_COHERENT_MEM */

    __RW_END__ = .;
    __BL32_END__ = .;

    /* Dynamic-linking metadata is never used at runtime; drop it. */
    /DISCARD/ : {
        *(.dynsym .dynstr .hash .gnu.hash)
    }

    ASSERT(. <= BL32_LIMIT, "BL32 image has exceeded its limit.")
    RAM_REGION_END = .;
}
Soby Mathewec8ac1c2016-05-05 14:32:05 +0100163}